[ 590.027439] env[61545]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61545) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 590.027801] env[61545]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61545) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 590.027915] env[61545]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61545) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 590.028241] env[61545]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 590.130493] env[61545]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61545) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 590.140837] env[61545]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61545) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 590.186179] env[61545]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 590.749797] env[61545]: INFO nova.virt.driver [None req-7c23ea6d-330e-4d20-9b2b-9a993e39532f None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 590.823670] env[61545]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 590.823830] env[61545]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 590.823958] env[61545]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61545) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 593.757609] env[61545]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-fd9150d1-725e-4e35-abb3-d9f87e601e67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.774695] env[61545]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61545) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 593.774904] env[61545]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-7eec9f9c-545e-4309-8498-45c54f7142e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.808267] env[61545]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 6ab18.
[ 593.808424] env[61545]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.985s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 593.808984] env[61545]: INFO nova.virt.vmwareapi.driver [None req-7c23ea6d-330e-4d20-9b2b-9a993e39532f None None] VMware vCenter version: 7.0.3
[ 593.812469] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8849d7-3c06-4f3f-be65-d8ba24a55df7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.834657] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dfc707-6be7-4fea-9951-78e87d420b9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.841133] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f45d24-ec9e-4569-8523-c3745e82a87a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.848127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be72b9e-b9cc-4782-a0e6-4f589138017b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.861342] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1305c6c-9e39-4cce-a3a9-a06c679d4f0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.867745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928cd45b-34a9-4385-9068-d1cda61aadb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.899886] env[61545]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-cf1f15a5-9399-46c9-9829-7e2255a0f3b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.906145] env[61545]: DEBUG nova.virt.vmwareapi.driver [None req-7c23ea6d-330e-4d20-9b2b-9a993e39532f None None] Extension org.openstack.compute already exists. {{(pid=61545) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 593.908875] env[61545]: INFO nova.compute.provider_config [None req-7c23ea6d-330e-4d20-9b2b-9a993e39532f None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 594.411871] env[61545]: DEBUG nova.context [None req-7c23ea6d-330e-4d20-9b2b-9a993e39532f None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),5ab855bc-bf39-4329-b140-be635528d67d(cell1) {{(pid=61545) load_cells /opt/stack/nova/nova/context.py:464}}
[ 594.414161] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 594.414418] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 594.415149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 594.415598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Acquiring lock "5ab855bc-bf39-4329-b140-be635528d67d" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 594.415791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Lock "5ab855bc-bf39-4329-b140-be635528d67d" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 594.416841] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Lock "5ab855bc-bf39-4329-b140-be635528d67d" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 594.437286] env[61545]: INFO dbcounter [None req-1f761982-c034-4adc-b507-dba783266033 None None] Registered counter for database nova_cell0
[ 594.446117] env[61545]: INFO dbcounter [None req-1f761982-c034-4adc-b507-dba783266033 None None] Registered counter for database nova_cell1
[ 594.900368] env[61545]: DEBUG oslo_db.sqlalchemy.engines [None req-1f761982-c034-4adc-b507-dba783266033 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61545) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 594.900812] env[61545]: DEBUG oslo_db.sqlalchemy.engines [None req-1f761982-c034-4adc-b507-dba783266033 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61545) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 594.906010] env[61545]: ERROR nova.db.main.api [None req-1f761982-c034-4adc-b507-dba783266033 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 594.906010] env[61545]:     result = function(*args, **kwargs)
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 594.906010] env[61545]:     return func(*args, **kwargs)
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 594.906010] env[61545]:     result = fn(*args, **kwargs)
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 594.906010] env[61545]:     return f(*args, **kwargs)
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 594.906010] env[61545]:     return db.service_get_minimum_version(context, binaries)
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 594.906010] env[61545]:     _check_db_access()
[ 594.906010] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 594.906010] env[61545]:     stacktrace = ''.join(traceback.format_stack())
[ 594.906010] env[61545]: 
[ 594.906851] env[61545]: ERROR nova.db.main.api [None req-1f761982-c034-4adc-b507-dba783266033 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 594.906851] env[61545]:     result = function(*args, **kwargs)
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 594.906851] env[61545]:     return func(*args, **kwargs)
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 594.906851] env[61545]:     result = fn(*args, **kwargs)
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 594.906851] env[61545]:     return f(*args, **kwargs)
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 594.906851] env[61545]:     return db.service_get_minimum_version(context, binaries)
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 594.906851] env[61545]:     _check_db_access()
[ 594.906851] env[61545]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 594.906851] env[61545]:     stacktrace = ''.join(traceback.format_stack())
[ 594.906851] env[61545]: 
[ 594.907280] env[61545]: WARNING nova.objects.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 594.907395] env[61545]: WARNING nova.objects.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] Failed to get minimum service version for cell 5ab855bc-bf39-4329-b140-be635528d67d
[ 594.907852] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Acquiring lock "singleton_lock" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 594.908018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Acquired lock "singleton_lock" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
594.908273] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1f761982-c034-4adc-b507-dba783266033 None None] Releasing lock "singleton_lock" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.908608] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] Full set of CONF: {{(pid=61545) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 594.908752] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ******************************************************************************** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 594.908879] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] Configuration options gathered from: {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 594.909030] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 594.909231] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 594.909363] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ================================================================================ {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 594.909573] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] allow_resize_to_same_host = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.909750] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] arq_binding_timeout = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.909910] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] backdoor_port = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910029] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] backdoor_socket = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910203] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] block_device_allocate_retries = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910366] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] block_device_allocate_retries_interval = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910537] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cert = self.pem {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910703] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.910877] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute_monitors = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.911308] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] config_dir = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.911507] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] config_drive_format = iso9660 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.911649] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.911830] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] config_source = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912038] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] console_host = devstack {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912229] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] control_exchange = nova {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912389] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cpu_allocation_ratio = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912553] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] daemon = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912725] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] debug = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.912882] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_access_ip_network_name = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913064] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_availability_zone = nova {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913227] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_ephemeral_format = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913386] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_green_pool_size = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913641] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913803] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] default_schedule_zone = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.913987] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] disk_allocation_ratio = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.914186] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] enable_new_services = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.914372] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] enabled_apis = ['osapi_compute'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.914539] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] enabled_ssl_apis = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.914703] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] flat_injected = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.914864] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] force_config_drive = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.915064] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] force_raw_images = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.915254] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] graceful_shutdown_timeout = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.915419] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] heal_instance_info_cache_interval = -1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.915646] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] host = cpu-1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.915873] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916098] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916279] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916497] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916671] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_build_timeout = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916827] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_delete_interval = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.916996] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_format = [instance: %(uuid)s] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.917180] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_name_template = instance-%08x {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.917344] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_usage_audit = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.917517] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] instance_usage_audit_period = month {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.917682] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.917847] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918021] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] internal_service_availability_zone = internal {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918179] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] key = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918343] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] live_migration_retry_count = 30 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918510] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_color = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918670] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_config_append = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.918877] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919090] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_dir = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919263] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919396] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_options = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919558] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_rotate_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919728] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_rotate_interval_type = days {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.919894] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] log_rotation_type = none {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920033] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920161] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920333] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920497] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920624] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920782] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] long_rpc_timeout = 1800 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.920941] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_concurrent_builds = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921115] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_concurrent_live_migrations = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921276] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_concurrent_snapshots = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921433] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_local_block_devices = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921591] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_logfile_count = 30 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921746] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] max_logfile_size_mb = 200 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.921925] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] maximum_instance_delete_attempts = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922116] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metadata_listen = 0.0.0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922289] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metadata_listen_port = 8775 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922458] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metadata_workers = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922618] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] migrate_max_retries = -1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922787] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] mkisofs_cmd = genisoimage {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.922994] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.923140] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] my_ip = 10.180.1.21 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.923351] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.923516] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] network_allocate_retries = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.923698] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.923866] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924072] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] osapi_compute_listen_port = 8774 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924270] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] osapi_compute_unique_server_name_scope = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924429] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] osapi_compute_workers = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924590] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] password_length = 12 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924751] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] periodic_enable = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.924916] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] periodic_fuzzy_delay = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925127] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] pointer_model = usbtablet {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925307] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] preallocate_images = none {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925473] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] publish_errors = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925604] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] pybasedir = /opt/stack/nova {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925764] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ram_allocation_ratio = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.925926] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] rate_limit_burst = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926111] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] rate_limit_except_level = CRITICAL {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926275] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] rate_limit_interval = 0 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926435] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reboot_timeout = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926594] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reclaim_instance_interval = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926747] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] record = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.926916] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reimage_timeout_per_gb = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927096] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] report_interval = 120 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927262] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] rescue_timeout = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927421] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reserved_host_cpus = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927580] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reserved_host_disk_mb = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927738] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reserved_host_memory_mb = 512 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.927931] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] reserved_huge_pages = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928133] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] resize_confirm_window = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928302] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] resize_fs_using_block_device = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928465] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] resume_guests_state_on_host_boot = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928638] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928798] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] rpc_response_timeout = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.928958] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] run_external_periodic_tasks = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929145] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] running_deleted_instance_action = reap {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929311] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929465] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] running_deleted_instance_timeout = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929623] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler_instance_sync_interval = 120 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929790] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_down_time = 720 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.929957] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] servicegroup_driver = db {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930130] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] shell_completion = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930293] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] shelved_offload_time = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930454] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] shelved_poll_interval = 3600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930622] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] shutdown_timeout = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930784] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] source_is_ipv6 = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.930943] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ssl_only = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.931216] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.931390] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] sync_power_state_interval = 600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.931553] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] sync_power_state_pool_size = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.931721] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] syslog_log_facility = LOG_USER {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.931895] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] tempdir = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932081] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] timeout_nbd = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932259] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] transport_url = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932420] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] update_resources_interval = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932583] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] use_cow_images = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932744] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] use_journal = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.932902] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] use_json = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933081] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] use_rootwrap_daemon = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933245] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] 
use_stderr = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933404] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] use_syslog = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933557] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vcpu_pin_set = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933723] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plugging_is_fatal = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.933886] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plugging_timeout = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.934087] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] virt_mkfs = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.934258] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] volume_usage_poll_interval = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.934421] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] watch_log_file = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.934590] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] web = /usr/share/spice-html5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 594.934774] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.934959] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.935168] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.935348] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_concurrency.disable_process_locking = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.935636] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.935817] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.935987] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.936176] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.936349] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.936535] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.936704] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.auth_strategy = keystone {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.936871] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.compute_link_prefix = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.937060] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.937307] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.dhcp_domain = novalocal {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.937586] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.enable_instance_password = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.937782] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.glance_link_prefix = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.937958] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938156] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938328] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.instance_list_per_project_cells = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938493] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.list_records_by_skipping_down_cells = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938660] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.local_metadata_per_cell = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938829] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.max_limit = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.938998] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.metadata_cache_expiration = 15 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.939193] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.neutron_default_tenant_id = default {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.939368] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.response_validation = warn {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.939543] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.use_neutron_default_nets = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.939712] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.939876] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.940111] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.940308] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.940487] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_dynamic_targets = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.940654] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_jsonfile_path = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.940835] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941046] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.backend = dogpile.cache.memcached {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941227] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.backend_argument = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941388] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.backend_expiration_time = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941558] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.config_prefix = cache.oslo {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941729] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.dead_timeout = 60.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.941894] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.debug_cache_backend = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942070] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.enable_retry_client = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942241] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.enable_socket_keepalive = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942415] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.enabled = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942577] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.enforce_fips_mode = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942740] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.expiration_time = 600 
{{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.942903] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.hashclient_retry_attempts = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943080] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943251] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_dead_retry = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943410] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_password = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943571] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943733] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.943894] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_pool_maxsize = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_sasl_enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.memcache_username = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.proxies = [] {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_db = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945464] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_password = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945464] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945588] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945740] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_server = localhost:6379 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.945910] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_socket_timeout = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946088] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.redis_username = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946269] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.retry_attempts = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946450] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.retry_delay = 0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946614] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.socket_keepalive_count = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946773] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.socket_keepalive_idle = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.946935] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.socket_keepalive_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947110] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.tls_allowed_ciphers = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947269] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.tls_cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947426] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.tls_certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947587] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.tls_enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947743] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cache.tls_keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.947911] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948098] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.auth_type = password {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948265] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948440] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948599] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948763] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.948925] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.cross_az_attach = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949101] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.debug = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949264] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.endpoint_template = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949429] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.http_retries = 3 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949594] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949753] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.949927] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.os_region_name = RegionOne {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950116] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950305] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cinder.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950452] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950611] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.cpu_dedicated_set = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950769] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.cpu_shared_set = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.950935] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.image_type_exclude_list = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951115] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951282] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951446] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951610] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951777] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.951978] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.resource_provider_association_refresh = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.952169] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.952335] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.shutdown_retry_interval = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.952520] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.952702] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] conductor.workers = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.952883] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] console.allowed_origins = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.953059] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] console.ssl_ciphers = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.953240] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] console.ssl_minimum_version = default {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.953411] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] consoleauth.enforce_session_timeout = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.953580] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] consoleauth.token_ttl = 600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954150] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954150] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.certfile = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954150] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954296] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954430] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954586] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954751] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.954908] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955084] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955249] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955406] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955565] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955722] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.955893] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.service_type = accelerator {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956067] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956254] env[61545]: DEBUG oslo_service.backend.eventlet.service 
[None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956413] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956575] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956760] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.956926] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] cyborg.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957115] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.asyncio_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957312] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.asyncio_slave_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957449] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.backend = sqlalchemy {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957617] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957782] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.connection_debug = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.957953] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.connection_parameters = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958135] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.connection_recycle_time = 3600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958301] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.connection_trace = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958466] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.db_inc_retry_interval = 
True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958628] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.db_max_retries = 20 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958791] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.db_max_retry_interval = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.958954] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.db_retry_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959134] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.max_overflow = 50 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959299] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.max_pool_size = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959459] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.max_retries = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959627] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959787] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.mysql_wsrep_sync_wait = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.959943] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.pool_timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.960120] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.retry_interval = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.960281] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.slave_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.960445] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.sqlite_synchronous = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.960606] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] database.use_db_reconnect = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
594.960772] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.asyncio_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.960931] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.asyncio_slave_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961117] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.backend = sqlalchemy {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961289] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961451] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.connection_debug = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961619] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.connection_parameters = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961781] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.connection_recycle_time = 3600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.961976] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.connection_trace = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962166] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.db_inc_retry_interval = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962333] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.db_max_retries = 20 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962496] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.db_max_retry_interval = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962664] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.db_retry_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962825] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.max_overflow = 50 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.962987] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.max_pool_size = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.963168] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.max_retries = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.963339] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.963499] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.963656] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.pool_timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.963819] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.retry_interval = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964040] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.slave_connection = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964230] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] api_database.sqlite_synchronous = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964412] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] devices.enabled_mdev_types = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964596] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964771] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.964940] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ephemeral_storage_encryption.enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965132] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965308] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.api_servers = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965475] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965637] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965802] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.965962] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966140] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966369] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.debug = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966501] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.default_trusted_certificate_ids = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966667] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.enable_certificate_validation = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966830] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.enable_rbd_download = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.966991] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967175] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967340] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967500] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.max_version = None {{(pid=61545) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967655] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967817] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.num_retries = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.967985] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.rbd_ceph_conf = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968165] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.rbd_connect_timeout = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968337] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.rbd_pool = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968505] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.rbd_user = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968674] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968837] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.968997] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.969184] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.service_type = image {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.969349] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.969509] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.969668] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.969826] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] glance.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970017] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970201] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.verify_glance_signatures = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970364] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] glance.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970534] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] guestfs.debug = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970704] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.970867] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.auth_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971036] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971201] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971367] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971526] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971685] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.971860] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972054] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.insecure = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972222] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972386] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972546] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972711] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.972869] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973040] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973220] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.service_type = shared-file-system {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973386] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.share_apply_policy_timeout = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973552] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973712] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.973873] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.974071] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.974268] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.974433] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] manila.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.974605] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] mks.enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.974973] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.975178] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.manager_interval = 2400 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.975351] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.precache_concurrency = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.975520] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.remove_unused_base_images = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.975688] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.975855] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976078] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] image_cache.subdirectory_name = _base {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976289] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.api_max_retries = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976478] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.api_retry_interval = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976636] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976800] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.auth_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.976960] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977144] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977314] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977477] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.conductor_group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977637] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977796] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.977976] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.978139] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.978356] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.978542] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.978705] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.978874] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.peer_list = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979048] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979218] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.retriable_status_codes = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979383] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.serial_console_state_timeout = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979544] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979716] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.service_type = baremetal {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.979877] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.shard = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980061] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980230] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980392] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980549] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980730] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.980893] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ironic.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981094] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981277] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] key_manager.fixed_key = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981464] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981627] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.barbican_api_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981788] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.barbican_endpoint = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.981984] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.barbican_endpoint_type = public {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982166] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.barbican_region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982328] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982487] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982655] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982811] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.982969] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983151] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.number_of_retries = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983315] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.retry_delay = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983478] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.send_service_user_token = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983642] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983798] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.983984] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.verify_ssl = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984166] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican.verify_ssl_path = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984338] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984502] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.auth_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984660] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984819] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.984983] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985164] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985322] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985485] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985643] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] barbican_service_user.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985809] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.approle_role_id = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.985969] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] vault.approle_secret_id = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986156] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.kv_mountpoint = secret {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986335] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.kv_path = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986509] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.kv_version = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986672] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.namespace = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986830] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.root_token_id = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.986987] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.ssl_ca_crt_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.987174] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.timeout = 60.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.987339] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.use_ssl = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.987510] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.987689] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.987834] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988037] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988230] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.connect_retries = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988396] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988556] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988721] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.988879] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989059] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989226] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989389] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989553] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989713] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.989884] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.service_type = identity {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990069] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990234] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990393] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990549] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990732] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.990886] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] keystone.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.991090] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.ceph_mount_options = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.991526] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.991717] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.connection_uri = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.991901] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_mode = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992098] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992279] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_models = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992453] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_power_governor_high = performance {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992623] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992788] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_power_management = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.992960] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993146] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.device_detach_attempts = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993315] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.device_detach_timeout = 20 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993482] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.disk_cachemodes = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993641] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.disk_prefix = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993805] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.enabled_perf_events = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.993999] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.file_backed_memory = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.994189] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.gid_maps = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.994352] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.hw_disk_discard = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.994512] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.hw_machine_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.994680] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_rbd_ceph_conf = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.994845] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995017] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_rbd_glance_store_name = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995366] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_rbd_pool = rbd 
{{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995537] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_type = default {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995693] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.images_volume_group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.995853] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.inject_key = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996024] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.inject_partition = -2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996193] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.inject_password = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996379] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.iscsi_iface = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996547] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.iser_use_multipath = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996718] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.996872] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997046] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_downtime = 500 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997216] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997378] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997538] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_inbound_addr = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997700] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.997862] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998029] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_scheme = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998208] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_timeout_action = abort {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998371] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_tunnelled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998530] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_uri = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998691] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.live_migration_with_native_tls = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.998851] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.max_queues = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.999022] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.999259] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.999425] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.nfs_mount_options = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.999723] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.999902] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61545) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000108] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000308] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000488] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000657] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_pcie_ports = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000829] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.000993] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.pmem_namespaces = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.001174] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.quobyte_client_cfg = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.001463] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.001641] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.001807] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.001971] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002148] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rbd_secret_uuid = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002310] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rbd_user = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002472] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002644] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002807] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rescue_image_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.002964] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rescue_kernel_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.003139] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rescue_ramdisk_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.003311] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.003475] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.rx_queue_size = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.003645] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.smbfs_mount_options = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.003952] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.004156] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.snapshot_compression = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.004328] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.snapshot_image_format = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.004568] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.004741] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.sparse_logical_volumes = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.004909] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.swtpm_enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005099] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.swtpm_group = tss {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005278] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.swtpm_user = tss {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005451] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.sysinfo_serial = unique {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005614] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.tb_cache_size = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005775] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.tx_queue_size = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.005943] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.uid_maps = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.006135] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.use_virtio_for_bridges = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.006331] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.virt_type = kvm {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.006515] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.volume_clear = zero {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.006684] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.volume_clear_size = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.006853] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.volume_enforce_multipath = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007033] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.volume_use_multipath = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007205] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_cache_path = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007377] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007547] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007716] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.007909] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.008191] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.008379] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.vzstorage_mount_user = stack {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.008550] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.008728] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.008907] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.auth_type = password {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009083] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009250] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009415] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009577] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009737] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.009910] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.default_floating_pool = public {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010424] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010424] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.extension_sync_interval = 600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010424] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.http_retries = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010565] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010728] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.010933] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011072] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011240] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011412] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.ovs_bridge = br-int {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011579] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.physnets = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011750] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.region_name = RegionOne 
{{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.011949] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012160] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.service_metadata_proxy = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012329] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012503] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.service_type = network {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012671] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012833] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.012993] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.013171] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.013357] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.013522] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] neutron.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.013695] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.bdms_in_notifications = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.013873] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.default_level = INFO {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014077] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.include_share_mapping = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014265] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.notification_format = unversioned {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014434] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.notify_on_state_change = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014614] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014792] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] pci.alias = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.014965] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] pci.device_spec = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015151] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] pci.report_in_placement = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015327] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015502] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.auth_type = password {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015673] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015838] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.015998] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.016181] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.016365] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.connect_retries = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.016532] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.016697] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.default_domain_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.016854] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.default_domain_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017033] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.domain_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017205] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.domain_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017366] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.endpoint_override = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017530] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017689] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.017876] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018076] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018258] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.password = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018422] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.project_domain_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018591] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.project_domain_name = Default {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018758] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.project_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.018933] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.project_name = service {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019119] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.region_name = RegionOne {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019284] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019447] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019618] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.service_type = placement {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019782] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.019976] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.status_code_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.020198] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.020367] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.system_scope = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.020529] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.020689] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.trust_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.020848] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.user_domain_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021044] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] 
placement.user_domain_name = Default {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021194] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.user_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021371] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.username = nova {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021554] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021718] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] placement.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.021939] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.cores = 20 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022091] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.count_usage_from_placement = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022272] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022443] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.injected_file_content_bytes = 10240 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022611] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.injected_file_path_length = 255 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022776] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.injected_files = 5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.022942] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.instances = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.023128] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.key_pairs = 100 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.023295] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.metadata_items = 128 {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.023460] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.ram = 51200 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.023627] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.recheck_quota = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.023795] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.server_group_members = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024024] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.server_groups = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024265] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024449] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] quota.unified_limits_resource_strategy = require {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024627] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024794] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.024959] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.image_metadata_prefilter = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025138] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025305] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.max_attempts = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025468] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.max_placement_results = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025631] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025792] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.025952] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.026150] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] scheduler.workers = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.026347] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.026531] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.026715] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.026886] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027070] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027234] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027401] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027591] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027761] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.027927] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028144] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028274] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028440] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028618] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028793] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.isolated_hosts = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.028960] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.isolated_images = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029140] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029303] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029465] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029627] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.pci_in_placement = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029789] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.029951] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030132] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030298] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030455] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030616] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030780] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.track_instance_changes = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.030958] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.031144] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metrics.required = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.031310] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metrics.weight_multiplier = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.031472] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.031638] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] metrics.weight_setting = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.031998] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.032192] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.enabled = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.032373] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.port_range = 10000:20000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.032546] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.032715] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.032882] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] serial_console.serialproxy_port = 6083 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033065] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033248] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.auth_type = password {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033412] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033569] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033733] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.033895] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034100] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034288] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.send_service_user_token = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034454] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.split_loggers = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034611] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] service_user.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034782] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.agent_enabled = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.034946] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.035298] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.035509] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.035682] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.html5proxy_port = 6082 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.035844] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.image_compression = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.036045] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.jpeg_compression = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.036235] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.playback_compression = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.036423] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.require_secure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.036599] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.server_listen = 127.0.0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.036772] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037065] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037249] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.streaming_mode = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037413] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] spice.zlib_compression = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037583] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] upgrade_levels.baseapi = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037758] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] upgrade_levels.compute = auto {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.037920] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] upgrade_levels.conductor = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038093] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] upgrade_levels.scheduler = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038267] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038431] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038591] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038750] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.038913] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039087] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039254] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039418] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039577] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vendordata_dynamic_auth.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039751] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.api_retry_count = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.039912] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.ca_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040097] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040276] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.cluster_name = testcl1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040442] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.connection_pool_size = 10 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040602] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.console_delay_seconds = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040773] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.datastore_regex = ^datastore.* {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.040987] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.041187] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.host_password = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.041353] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.host_port = 443 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.041522] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.host_username = administrator@vsphere.local {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.041692] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.insecure = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.041875] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.integration_bridge = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042072] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.maximum_objects = 100 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042242] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.pbm_default_policy = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042407] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.pbm_enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042566] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.pbm_wsdl_location = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042738] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.042898] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.serial_port_proxy_uri = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043079] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.serial_port_service_uri = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043256] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.task_poll_interval = 0.5 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043428] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.use_linked_clone = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043600] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.vnc_keymap = en-us {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043764] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.vnc_port = 5900 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.043953] env[61545]: DEBUG 
oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vmware.vnc_port_total = 10000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.044163] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.auth_schemes = ['none'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.044349] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.044637] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.044825] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045008] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.novncproxy_port = 6080 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045233] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.server_listen = 127.0.0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045420] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045585] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.vencrypt_ca_certs = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045746] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.vencrypt_client_cert = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.045906] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vnc.vencrypt_client_key = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046112] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046287] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_deep_image_inspection = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046456] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046621] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046793] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.046959] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.disable_rootwrap = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047144] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.enable_numa_live_migration = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047313] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047477] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047642] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047806] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.libvirt_disable_apic = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.047972] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.048190] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.048633] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.048703] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.048851] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049034] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049205] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049370] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049531] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049697] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.049881] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050070] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.client_socket_timeout = 900 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050244] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.default_pool_size = 1000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050411] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.keep_alive = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050576] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.max_header_line = 16384 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050739] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.050900] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.ssl_ca_file = None 
{{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051072] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.ssl_cert_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051240] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.ssl_key_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051406] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.tcp_keepidle = 600 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051577] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051742] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] zvm.ca_file = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.051922] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] zvm.cloud_connector_url = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.052252] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.052432] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] zvm.reachable_timeout = 300 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.052606] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.052785] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.052963] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.connection_string = messaging:// {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.053152] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.enabled = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.053329] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] 
profiler.es_doc_type = notification {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.053492] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.es_scroll_size = 10000 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.053663] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.es_scroll_time = 2m {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.053829] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.filter_error_trace = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054039] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.hmac_keys = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054233] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.sentinel_service_name = mymaster {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054408] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.socket_timeout = 0.1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054575] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.trace_requests = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054741] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler.trace_sqlalchemy = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.054931] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler_jaeger.process_tags = {} {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055130] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler_jaeger.service_name_prefix = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055309] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] profiler_otlp.service_name_prefix = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055477] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] remote_debug.host = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055637] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] remote_debug.port = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055816] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.055979] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056163] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056324] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056487] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056646] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056806] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.056970] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057144] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057316] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057475] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057644] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057812] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.057980] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.058179] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.058392] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.058566] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.058730] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.058904] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059081] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059254] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059423] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059594] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059761] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.059925] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61545) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060138] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060327] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060497] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060665] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060831] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.060997] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.061191] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.061366] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.061534] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.061704] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.061871] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062048] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062247] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062419] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_notifications.retry = -1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062598] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062772] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.062952] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.auth_section = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063138] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.auth_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063300] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.cafile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063459] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.certfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063625] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.collect_timing = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063783] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.connect_retries = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.063962] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.connect_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064153] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_id = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064332] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_interface = publicURL {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064496] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_override = 
None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064654] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064812] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.064972] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.endpoint_service_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065180] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.insecure = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065350] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.keyfile = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065510] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.max_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065669] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.min_version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065827] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.region_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.065994] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.retriable_status_codes = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066170] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.service_name = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066332] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.service_type = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066497] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.split_loggers = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066656] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.status_code_retries = None {{(pid=61545) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066816] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.status_code_retry_delay = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.066975] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.timeout = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067154] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.valid_interfaces = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067314] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_limit.version = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067526] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_reports.file_event_handler = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067645] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067806] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] oslo_reports.log_dir = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.067979] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068160] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068322] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068491] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068657] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068816] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.068992] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069169] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069331] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069496] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069658] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069817] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] vif_plug_ovs_privileged.user = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.069987] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.070188] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.070368] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.070543] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.070715] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.070888] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071069] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071239] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071423] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071596] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.isolate_vif = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071764] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.071969] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.072313] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.072505] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.072702] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] os_vif_ovs.per_port_bridge = False {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.072880] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.capabilities = [21] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.073081] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.073253] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.helper_command = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.073425] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.073657] env[61545]: DEBUG oslo_service.backend.eventlet.service [None 
req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.073839] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] privsep_osbrick.user = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074062] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074242] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.group = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074406] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.helper_command = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074577] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074745] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.074942] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] nova_sys_admin.user = None {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 595.075049] env[61545]: DEBUG oslo_service.backend.eventlet.service [None req-1f761982-c034-4adc-b507-dba783266033 None None] ******************************************************************************** {{(pid=61545) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 595.075474] env[61545]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 595.579110] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Getting list of instances from cluster (obj){ [ 595.579110] env[61545]: value = "domain-c8" [ 595.579110] env[61545]: _type = "ClusterComputeResource" [ 595.579110] env[61545]: } {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 595.580116] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07eb7dd-c336-40bf-ad9b-448bf685566e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.589751] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Got total of 0 instances {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 595.590436] env[61545]: WARNING nova.virt.vmwareapi.driver [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 595.590989] env[61545]: INFO nova.virt.node [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Generated node identity 7015027d-c4e1-4938-ac31-6e4672774d7e [ 595.591288] env[61545]: INFO nova.virt.node [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Wrote node identity 7015027d-c4e1-4938-ac31-6e4672774d7e to /opt/stack/data/n-cpu-1/compute_id [ 596.094581] env[61545]: WARNING nova.compute.manager [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Compute nodes ['7015027d-c4e1-4938-ac31-6e4672774d7e'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 597.101020] env[61545]: INFO nova.compute.manager [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 598.106287] env[61545]: WARNING nova.compute.manager [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 598.106715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.106808] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.106964] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.107135] env[61545]: DEBUG nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 598.108106] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d9381c-4b2d-4671-acd3-bf37734c80bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.116735] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1270ab-bd5a-4305-81a9-e4e775b82e1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.133345] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b6130a-bc47-4f75-a89a-e4a2611d0124 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.140202] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed234ca-59b5-41ec-89cc-eed0b51796a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.168974] env[61545]: DEBUG nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180759MB free_disk=247GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 598.169131] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.169346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.673352] env[61545]: WARNING nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] No compute node record for cpu-1:7015027d-c4e1-4938-ac31-6e4672774d7e: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 7015027d-c4e1-4938-ac31-6e4672774d7e could not be found. [ 599.176477] env[61545]: INFO nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 7015027d-c4e1-4938-ac31-6e4672774d7e [ 600.687025] env[61545]: DEBUG nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 600.687025] env[61545]: DEBUG nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=250GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 600.889210] env[61545]: INFO nova.scheduler.client.report [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] [req-e6b9917c-5b20-4499-8d64-48fad6163916] Created resource provider record via placement API for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 600.906018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81699f42-8517-423c-9505-50e665b9a151 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.913431] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5517c0e9-05d0-4c10-a0ea-f0999a106fb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.945444] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f2d2f1-f6ab-4a75-b04f-a93281f3b6b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.955175] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c5b2bd-151c-46d9-8b38-b84ffcb8db53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.969501] env[61545]: DEBUG nova.compute.provider_tree [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.508035] env[61545]: DEBUG nova.scheduler.client.report [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 601.508035] env[61545]: DEBUG nova.compute.provider_tree [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 0 to 1 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 601.508035] env[61545]: DEBUG nova.compute.provider_tree [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.562953] env[61545]: DEBUG nova.compute.provider_tree [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Updating 
resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 1 to 2 during operation: update_traits {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 602.070414] env[61545]: DEBUG nova.compute.resource_tracker [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 602.070414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.899s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.070414] env[61545]: DEBUG nova.service [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Creating RPC server for service compute {{(pid=61545) start /opt/stack/nova/nova/service.py:186}} [ 602.084891] env[61545]: DEBUG nova.service [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] Join ServiceGroup membership for this service compute {{(pid=61545) start /opt/stack/nova/nova/service.py:203}} [ 602.085113] env[61545]: DEBUG nova.servicegroup.drivers.db [None req-2bff315c-b1a7-451b-86f2-eab8bf7d1739 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61545) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 636.090026] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.592962] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Getting list of instances from cluster (obj){ [ 636.592962] env[61545]: value = "domain-c8" [ 636.592962] env[61545]: _type = "ClusterComputeResource" [ 636.592962] env[61545]: } {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 636.594845] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce194f6-e5ec-4d7b-a721-f0b7694fad0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.608046] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Got total of 0 instances {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 636.608509] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.609017] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Getting list of instances from cluster (obj){ [ 636.609017] env[61545]: value = "domain-c8" [ 636.609017] env[61545]: _type = "ClusterComputeResource" [ 636.609017] env[61545]: } {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 636.610865] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fc1d83-f7ed-49cf-abcf-3a64a1b41515 
{{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.620872] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Got total of 0 instances {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 641.130031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.130031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.635143] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.071608] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.071843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.178454] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.181111] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.181355] env[61545]: INFO nova.compute.claims [None 
req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.576341] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.114514] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.243465] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "256e48c1-81de-4d32-97dc-ba80541a9239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.243763] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.315064] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2116bae5-b8b2-4f14-86e8-349e0503ea1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.329098] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b707374c-92e6-4af1-8456-32b094333724 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.367796] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774e1d8c-32c6-4ebc-bd8a-c4861be6a9b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.378245] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cd82b0-6291-44b2-9c36-b38165ec373a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.393458] env[61545]: DEBUG nova.compute.provider_tree [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.747050] env[61545]: DEBUG nova.compute.manager [None 
req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.753796] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "5d9eadff-7f13-4720-8119-5829b4802c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.754041] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.779166] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "579fb20b-083f-4227-9a13-c0f1ea36e272" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.780194] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.897394] env[61545]: DEBUG nova.scheduler.client.report [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.256451] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.279763] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.281413] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.402122] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.402788] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 644.407463] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.293s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.409142] env[61545]: INFO nova.compute.claims [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.641622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.641873] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.794847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 
tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.815518] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.920455] env[61545]: DEBUG nova.compute.utils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 644.923290] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 644.924238] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.145334] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.253153] env[61545]: DEBUG nova.policy [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f46ceb43ab3d40edbb17fdd7e8f6bd35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e854a184de8e4b02aa3594b81c6d99f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 645.435763] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 645.574944] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bb8669-5ac1-4ebe-920a-3d484e4c97dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.585689] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e263adc-69ca-4f8b-8612-444c20672d14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.618587] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9360ee64-de82-43fa-a450-1248f2a7fc38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.627669] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7bcfe1-fab5-49ca-8dd3-b3e94aae1086 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.647533] env[61545]: DEBUG nova.compute.provider_tree [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.676911] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.902919] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Successfully created port: d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.151409] env[61545]: DEBUG nova.scheduler.client.report [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.452042] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 646.486851] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 646.486946] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.488112] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 646.488112] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.488112] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 646.488112] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 646.488112] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 646.488374] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 646.488374] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 646.488587] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 646.488692] env[61545]: DEBUG nova.virt.hardware [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 646.490028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ea41fd-2526-4a36-9da9-e2e023935989 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.500053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bd28e0-e0cb-4a9c-8a81-1c146402c49d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.529154] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ce42dc-7b29-4056-9dc4-470b11c2e45e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.662128] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.662128] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 646.670218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.388s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.670218] env[61545]: INFO nova.compute.claims [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.176938] env[61545]: DEBUG nova.compute.utils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.186799] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 647.186799] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 647.314185] env[61545]: DEBUG nova.policy [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '930908746ad94803bda00f91a41ec973', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1052fbf2fd0047e49d219d5faeb34af0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 647.697195] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 647.853032] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31dbc46-cda8-41e9-adf4-57b39bf47acc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.862997] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4600accf-ea46-4f21-8835-5b45c8c33dce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.906528] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23db897d-f256-4598-a562-b8129b8b1cf6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.918138] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb7b2f0-6ee0-4402-90bc-2d2ba81feb05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.936594] env[61545]: DEBUG nova.compute.provider_tree [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.968812] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Successfully created port: 25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.440403] env[61545]: DEBUG nova.scheduler.client.report [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.710800] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 648.746174] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 648.746576] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.746895] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 648.747240] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.747593] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 648.747790] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 648.748019] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 648.748185] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 648.748354] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 648.748519] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 648.748691] env[61545]: DEBUG nova.virt.hardware [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 648.750223] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2674be58-8666-4b10-beec-e816fbc15d06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.759553] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf292c8-8344-439f-a079-40d791fd43b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.946954] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.947538] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.953137] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.159s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.957352] env[61545]: INFO nova.compute.claims [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.960575] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Successfully updated port: d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 649.464279] env[61545]: DEBUG nova.compute.utils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.473413] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 649.473413] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.476301] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.476770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.476988] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.647407] env[61545]: DEBUG nova.policy [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa33a5559dd647fe991b71ee67443081', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35d3b51d6e8c48c69cbf99d32114d8f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 649.975384] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 650.121859] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b427c76f-ab8a-4e58-b91e-57a81a23fd3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.130976] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce486a55-3e77-4462-be6a-963fbf1a0746 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.171837] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.174536] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5ac7ce-1287-4be4-8282-f4bf35081539 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.182915] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435458de-ac7f-423f-a28a-b03fd48ed4e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.203995] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.204703] env[61545]: DEBUG nova.compute.provider_tree [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.206028] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.206186] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.206373] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.206778] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.206970] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.207166] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.207329] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 650.207470] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.576420] env[61545]: DEBUG nova.compute.manager [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Received event network-vif-plugged-d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 650.578360] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] Acquiring lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.578796] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.578865] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.579082] env[61545]: DEBUG nova.compute.manager [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] No waiting events found dispatching network-vif-plugged-d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 650.579277] env[61545]: WARNING nova.compute.manager [req-8a3f60bd-0df4-42c6-aa13-d4c5d42da598 req-003f038b-ea82-412e-89de-e0b6c47b12d3 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Received unexpected event network-vif-plugged-d548df43-1a40-42e8-88e2-6487812a1612 for instance with vm_state building and task_state spawning. 
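The external-event handling recorded just above (acquire the per-instance "-events" lock, look for a registered waiter, warn when none is found) boils down to a locked registry of expected events keyed by instance. The Python below is a minimal illustrative sketch of that pattern with hypothetical names; it is not Nova's actual implementation.

    import threading

    # Hypothetical registry of events each instance is expected to receive,
    # e.g. "network-vif-plugged-<port-id>", guarded by a single lock.
    _expected = {}                      # instance_uuid -> {event_key: threading.Event}
    _events_lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_key):
        """Register interest in an event before starting the action that emits it."""
        with _events_lock:
            waiter = _expected.setdefault(instance_uuid, {}).setdefault(
                event_key, threading.Event())
        return waiter

    def pop_instance_event(instance_uuid, event_key):
        """Dispatch an incoming event; warn if nothing is waiting for it."""
        with _events_lock:
            waiter = _expected.get(instance_uuid, {}).pop(event_key, None)
        if waiter is None:
            print(f"Received unexpected event {event_key} for instance {instance_uuid}")
        else:
            waiter.set()                # wake whoever is blocked on this event

In the trace above the port for 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc becomes active before the driver has registered a waiter, so the event is dispatched with no one listening and only the WARNING is emitted while the build continues.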
[ 650.710960] env[61545]: DEBUG nova.scheduler.client.report [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.715318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.993617] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 651.026842] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 651.027163] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.027241] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.027542] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.027626] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 
tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.027713] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 651.027902] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 651.028073] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 651.028242] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 651.028722] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 651.028722] env[61545]: DEBUG nova.virt.hardware [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 651.029540] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37af9a8d-3988-4d8a-bb22-0425c61d5fdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.038030] env[61545]: DEBUG nova.network.neutron [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Updating instance_info_cache with network_info: [{"id": "d548df43-1a40-42e8-88e2-6487812a1612", "address": "fa:16:3e:40:77:7b", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd548df43-1a", "ovs_interfaceid": "d548df43-1a40-42e8-88e2-6487812a1612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.040752] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2741b1-6785-4a90-9d0a-c787d1dbb56b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.219026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.219240] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 651.223036] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.407s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.225258] env[61545]: INFO nova.compute.claims [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.415569] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Successfully created port: 72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.547652] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.547993] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Instance network_info: |[{"id": "d548df43-1a40-42e8-88e2-6487812a1612", "address": "fa:16:3e:40:77:7b", "network": {"id": 
"b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd548df43-1a", "ovs_interfaceid": "d548df43-1a40-42e8-88e2-6487812a1612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 651.549028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:77:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd548df43-1a40-42e8-88e2-6487812a1612', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.564943] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.565707] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8873114b-9a37-4561-a644-7121bd0936d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.584302] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created folder: OpenStack in parent group-v4. [ 651.584302] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating folder: Project (e854a184de8e4b02aa3594b81c6d99f5). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.584302] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-496a361b-00c3-45ac-91b8-a1e1ebf697a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.585189] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Successfully updated port: 25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.596681] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created folder: Project (e854a184de8e4b02aa3594b81c6d99f5) in parent group-v838542. [ 651.596897] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating folder: Instances. Parent ref: group-v838543. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.598838] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76adafb8-e24b-442d-babc-ba1468bc9edf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.613285] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created folder: Instances in parent group-v838543. [ 651.613789] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.614697] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 651.615400] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23961b02-c46f-4a93-8203-2be8d89560ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.642382] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 651.642382] env[61545]: value = "task-4255294" [ 651.642382] env[61545]: _type = "Task" [ 651.642382] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.657080] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255294, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.734036] env[61545]: DEBUG nova.compute.utils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 651.738733] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.738914] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.928632] env[61545]: DEBUG nova.policy [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5e17b800ba241cc950c51ded0600efb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6a39a7ba1754d1ea7d5a67a7ff09149', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 652.088847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.091103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquired lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.091103] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.156176] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255294, 'name': CreateVM_Task, 'duration_secs': 0.467851} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.156354] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 652.239108] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 652.326444] env[61545]: DEBUG oslo_vmware.service [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75972a1c-c875-4a27-a01e-32b45d6152ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.338329] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.338329] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.338545] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 652.339146] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e10128d4-478b-4ceb-9706-f32fb858b063 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.346811] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 652.346811] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5298eb50-cb1c-6a7f-280a-ac13fe2fb3f3" [ 652.346811] env[61545]: _type = "Task" [ 652.346811] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.365459] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5298eb50-cb1c-6a7f-280a-ac13fe2fb3f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.404028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1101e3-7d80-4d67-886b-91ee47379162 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.415700] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a299c299-c9ae-4c36-8626-f825b25cee65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.452557] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcf0c1d-299e-4434-948e-88e9241e704a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.462098] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cd7881-491b-4518-86e0-6bd92130c968 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.481378] env[61545]: DEBUG nova.compute.provider_tree [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.690575] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.861439] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.861743] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 652.862051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.862207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.862682] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 652.863019] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4d9475f-33ea-41a0-8de2-c813bbe1b0ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.887196] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 652.888715] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 652.890235] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135c5500-ddba-41d2-885e-3ab54b2695c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.899977] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac9d932e-bc5c-465c-93ed-77c89139f6e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.912479] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 652.912479] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5284543a-14c5-45c7-49bd-df9d75f9a418" [ 652.912479] env[61545]: _type = "Task" [ 652.912479] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.924411] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5284543a-14c5-45c7-49bd-df9d75f9a418, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.984848] env[61545]: DEBUG nova.scheduler.client.report [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.188025] env[61545]: DEBUG nova.network.neutron [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Updating instance_info_cache with network_info: [{"id": "25b3b54b-4b63-407e-bd50-625dca707982", "address": "fa:16:3e:1f:0c:19", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap25b3b54b-4b", "ovs_interfaceid": "25b3b54b-4b63-407e-bd50-625dca707982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.260992] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 653.316134] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.316134] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.319416] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.319593] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.319700] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.319824] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.320084] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 
tempest-ServerDiagnosticsTest-463569044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.320580] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.320580] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.320580] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.320868] env[61545]: DEBUG nova.virt.hardware [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.321822] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d780ed65-3b50-4435-a7d2-b50bb976f719 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.331418] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f55e9d-d992-4c4d-9e92-b3d84ec0309b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.387807] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Successfully created port: bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.426645] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 653.426645] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating directory with path [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.426645] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1bf8b1e-abcb-4507-9991-b5dd37ba33b9 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.462412] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created directory with path [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.462839] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Fetch image to [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 653.462960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Downloading image file data bf68eb43-6d66-4532-9eb1-af7d78faa698 to [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk on the data store datastore2 {{(pid=61545) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 653.463848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f82ac25-8ac6-47a0-b0f3-6f0b8f753aa6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.475483] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1cb344-ca10-4e3d-9d0f-255134351d92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.489341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.489836] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.494282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.817s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.495746] env[61545]: INFO nova.compute.claims [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.499833] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96d63f7-5794-425b-873a-2e09d8e5a2e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.510209] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.510209] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.548025] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c8743d-d984-4ef7-80e2-a6b47aa778f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.553922] env[61545]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-930abdf9-2cf6-4529-8c72-5a0ae22d9bc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.588568] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Downloading image file data bf68eb43-6d66-4532-9eb1-af7d78faa698 to the data store datastore2 {{(pid=61545) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 653.678903] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 653.745586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Releasing lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.745874] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Instance network_info: |[{"id": "25b3b54b-4b63-407e-bd50-625dca707982", "address": "fa:16:3e:1f:0c:19", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25b3b54b-4b", "ovs_interfaceid": "25b3b54b-4b63-407e-bd50-625dca707982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 653.746735] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:0c:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25b3b54b-4b63-407e-bd50-625dca707982', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.754561] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Creating folder: Project (1052fbf2fd0047e49d219d5faeb34af0). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.756198] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcff2be6-4dfb-411c-b5ec-63e65749b8e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.772193] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Created folder: Project (1052fbf2fd0047e49d219d5faeb34af0) in parent group-v838542. [ 653.773086] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Creating folder: Instances. Parent ref: group-v838546. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.773086] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-692a24cd-6a9d-4fc8-a344-7d25afcefb41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.784342] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Created folder: Instances in parent group-v838546. [ 653.784611] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.784911] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.785283] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa9dcd42-4a08-4783-823b-f10f725bdd3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.811512] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.811512] env[61545]: value = "task-4255297" [ 653.811512] env[61545]: _type = "Task" [ 653.811512] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.824778] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255297, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.995545] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Successfully updated port: 72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.017501] env[61545]: DEBUG nova.compute.utils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 654.029495] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 654.035019] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 654.098189] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Received event network-changed-d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 654.100032] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Refreshing instance network info cache due to event network-changed-d548df43-1a40-42e8-88e2-6487812a1612. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 654.100032] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Acquiring lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.100032] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Acquired lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.100032] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Refreshing network info cache for port d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 654.336071] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255297, 'name': CreateVM_Task, 'duration_secs': 0.404581} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.336071] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.336925] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.337108] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.337800] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.338147] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d411e526-fe2e-466c-b75a-88baf22b5389 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.347916] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 654.347916] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52457ed2-f41c-ca85-8ef0-dba66969db52" [ 654.347916] env[61545]: _type = "Task" [ 654.347916] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.357213] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52457ed2-f41c-ca85-8ef0-dba66969db52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.497752] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Completed reading data from the image iterator. 
{{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 654.497752] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 654.501521] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.501782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquired lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.501888] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.535931] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.565898] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.567283] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Downloaded image file data bf68eb43-6d66-4532-9eb1-af7d78faa698 to vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk on the data store datastore2 {{(pid=61545) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 654.569200] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Caching image {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 654.569946] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copying Virtual Disk [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk to [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.570142] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-746e545e-1d20-43ea-bdaf-0000f90be131 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.584758] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 654.584758] env[61545]: value = "task-4255298" [ 654.584758] env[61545]: _type = "Task" [ 654.584758] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.595915] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255298, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.683653] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069ce21c-2316-43fa-aa51-7a09b35120c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.696935] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ae250a-7aa0-40de-a6ca-f3d3d815ceff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.734970] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7727912a-8f72-4d3b-bb3f-7b24698d96be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.744368] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0a27e0-80b0-48a0-b016-5fe99c7e0d5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.763318] env[61545]: DEBUG nova.compute.provider_tree [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.868755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.869084] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.869305] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.098473] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255298, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.101340] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.269254] env[61545]: DEBUG nova.scheduler.client.report [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.442064] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Updated VIF entry in instance network info cache for port d548df43-1a40-42e8-88e2-6487812a1612. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 655.442064] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Updating instance_info_cache with network_info: [{"id": "d548df43-1a40-42e8-88e2-6487812a1612", "address": "fa:16:3e:40:77:7b", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd548df43-1a", "ovs_interfaceid": "d548df43-1a40-42e8-88e2-6487812a1612", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.519242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "4f879b20-bae0-4d50-b5e9-378356341962" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.519676] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.539743] env[61545]: DEBUG nova.network.neutron [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Updating instance_info_cache with network_info: [{"id": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "address": "fa:16:3e:d6:18:cb", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72abf1ec-6a", "ovs_interfaceid": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.554314] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.588277] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.588613] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.588613] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.588763] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.588982] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.589092] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.589324] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.589489] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
655.589731] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.589844] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.590028] env[61545]: DEBUG nova.virt.hardware [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.591605] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58141641-3d95-4395-af59-a897fd33cc5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.617731] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c321843-1552-4dc6-a631-f2016ae691b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.630215] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "8a3ac91d-8949-4745-9161-1a70899c0293" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.630969] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.637804] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715912} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.638266] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copied Virtual Disk [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk to [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.639201] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleting the datastore file [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698/tmp-sparse.vmdk {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 655.639201] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b01870b3-9540-4069-81be-7024c465680a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.650013] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.656059] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Creating folder: Project (4f38dd9159814b24a14ecac0118e1c2b). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.657544] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a59bf8a4-43d5-4680-9c46-7a77d3456a84 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.663055] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 655.663055] env[61545]: value = "task-4255299" [ 655.663055] env[61545]: _type = "Task" [ 655.663055] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.670770] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Created folder: Project (4f38dd9159814b24a14ecac0118e1c2b) in parent group-v838542. [ 655.670770] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Creating folder: Instances. Parent ref: group-v838549. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.670957] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd7cddca-5fde-4bac-a5cf-befa072ca4de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.676850] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.681771] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Created folder: Instances in parent group-v838549. [ 655.682079] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.682296] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.682503] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ed263b3-3dbc-4276-bab7-3039fa36d82a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.701619] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.701619] env[61545]: value = "task-4255302" [ 655.701619] env[61545]: _type = "Task" [ 655.701619] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.711213] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255302, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.776771] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.777365] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 655.785150] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.067s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.785150] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.785150] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 655.785150] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.218s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.788550] env[61545]: INFO nova.compute.claims [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.791805] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca67307a-3b36-4e2b-91b7-d6d7427d3c6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.808172] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d526a6-181b-4b98-8c02-aebffd3583c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.826491] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c52a8e-97f2-4f0f-9ad9-525da3e6b091 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.838283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c951e9f-04dd-4993-8497-7db460a8a165 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.880318] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180772MB free_disk=247GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 655.880532] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.887346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "5719daa8-a5bc-4604-b465-a57097695c6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.887880] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.945729] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Releasing lock "refresh_cache-2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.946028] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Received event network-vif-plugged-25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 655.946228] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Acquiring lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.946485] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.946659] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.948299] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] No waiting events found dispatching network-vif-plugged-25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 655.948514] env[61545]: WARNING nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Received unexpected event 
network-vif-plugged-25b3b54b-4b63-407e-bd50-625dca707982 for instance with vm_state building and task_state spawning. [ 655.948710] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Received event network-changed-25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 655.948880] env[61545]: DEBUG nova.compute.manager [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Refreshing instance network info cache due to event network-changed-25b3b54b-4b63-407e-bd50-625dca707982. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 655.949080] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Acquiring lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.949218] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Acquired lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.949722] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Refreshing network info cache for port 25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.023472] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.047762] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Releasing lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.047762] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Instance network_info: |[{"id": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "address": "fa:16:3e:d6:18:cb", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72abf1ec-6a", "ovs_interfaceid": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.048046] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:18:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72abf1ec-6ac4-4654-b096-bdfb06c58f03', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.054299] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Creating folder: Project (35d3b51d6e8c48c69cbf99d32114d8f6). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.054600] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc73a8e5-5a2d-4412-b41d-6d899d859a76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.068389] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Created folder: Project (35d3b51d6e8c48c69cbf99d32114d8f6) in parent group-v838542. 
[ 656.068599] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Creating folder: Instances. Parent ref: group-v838552. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.069692] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fc18219-9462-44b9-a339-a9e9976b9d5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.081626] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Created folder: Instances in parent group-v838552. [ 656.082226] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.082226] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.082356] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01d1ea8f-c357-4d99-8188-bfe42734af44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.107269] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.107269] env[61545]: value = "task-4255305" [ 656.107269] env[61545]: _type = "Task" [ 656.107269] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.118848] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255305, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.138860] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.165524] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Successfully updated port: bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.179302] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030312} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.182068] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 656.182068] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Moving file from [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5/bf68eb43-6d66-4532-9eb1-af7d78faa698 to [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698. {{(pid=61545) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 656.182068] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-0ec41c4f-7986-48c4-a2d3-85ca127cc189 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.191586] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 656.191586] env[61545]: value = "task-4255306" [ 656.191586] env[61545]: _type = "Task" [ 656.191586] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.205786] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255306, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.215641] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255302, 'name': CreateVM_Task, 'duration_secs': 0.328641} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.215641] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.215641] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.215641] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.215641] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.216490] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf6bccc-0fcf-4860-bd32-f351237b6ef6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.222125] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 656.222125] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5224bd7c-2658-19e5-78eb-1e1fc5c5efb1" [ 656.222125] env[61545]: _type = "Task" [ 656.222125] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.233537] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5224bd7c-2658-19e5-78eb-1e1fc5c5efb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.295461] env[61545]: DEBUG nova.compute.utils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 656.295461] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 656.295461] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 656.391611] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.422327] env[61545]: DEBUG nova.policy [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de2ff39939bd440b8df0819c626fc2ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f56d2d605ffd4d098959105ab53d9803', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 656.566691] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.619716] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255305, 'name': CreateVM_Task, 'duration_secs': 0.384015} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.620203] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.620952] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.661772] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.673677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.673677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquired lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.673677] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.711444] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255306, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.049142} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.711746] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] File moved {{(pid=61545) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 656.712044] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Cleaning up location [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 656.712314] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleting the datastore file [datastore2] vmware_temp/05e1da61-8e21-47e8-ac24-6d06bb6d26a5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 656.712636] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29afb285-1541-4bdb-852b-b9a8dadb5ac3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.723014] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 656.723014] env[61545]: value = "task-4255307" [ 656.723014] env[61545]: _type = "Task" [ 656.723014] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.738731] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5224bd7c-2658-19e5-78eb-1e1fc5c5efb1, 'name': SearchDatastore_Task, 'duration_secs': 0.012138} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.742175] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.742402] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.742674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.742833] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255307, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.743086] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.743394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.743644] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43b3583-2d68-414b-b125-73a6f0511be7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.750429] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 656.750429] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52409382-c867-79d9-8dd8-9bab2022d5b2" [ 656.750429] env[61545]: _type = "Task" [ 656.750429] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.761773] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52409382-c867-79d9-8dd8-9bab2022d5b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.802485] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.918034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.029517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d724fb17-635b-44f7-9d1b-dc2ed49a6ffc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.039364] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3463d8-c817-4507-b4dd-3d599885ebb2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.073863] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf63aa8e-a011-435f-9664-017fc58e8aa0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.083639] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef18c8d-0889-4429-9792-393f6a298f4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.099466] env[61545]: DEBUG nova.compute.provider_tree [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.116016] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Updated VIF entry in instance network info cache for port 25b3b54b-4b63-407e-bd50-625dca707982. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.116718] env[61545]: DEBUG nova.network.neutron [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Updating instance_info_cache with network_info: [{"id": "25b3b54b-4b63-407e-bd50-625dca707982", "address": "fa:16:3e:1f:0c:19", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25b3b54b-4b", "ovs_interfaceid": "25b3b54b-4b63-407e-bd50-625dca707982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.125026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.125026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.239760] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.028457} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.240891] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 657.242181] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-744e4d9a-a15f-4809-ad04-564dfe59468e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.251828] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 657.251828] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52139a60-6818-394b-d274-177fad5e7019" [ 657.251828] env[61545]: _type = "Task" [ 657.251828] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.278769] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52139a60-6818-394b-d274-177fad5e7019, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.282905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.283219] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc/2a43ac48-cdea-48c8-b3d2-e939c69ce2dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.283512] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52409382-c867-79d9-8dd8-9bab2022d5b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010997} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.283729] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.283913] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.284174] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7af99883-db17-4339-8d88-784f6a0d08cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.286920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.286920] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.286920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.287213] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a0e82b6-bac7-43fc-87e7-d2a890078e1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.295472] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 657.295472] env[61545]: value = "task-4255308" [ 657.295472] env[61545]: _type = "Task" [ 657.295472] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.297064] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.297163] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.305957] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78ecc4b9-29bd-4fd2-8a6d-1b5708602388 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.319508] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 657.319508] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f4a734-519e-6165-c843-85eb11ccd673" [ 657.319508] env[61545]: _type = "Task" [ 657.319508] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.323049] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.335546] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f4a734-519e-6165-c843-85eb11ccd673, 'name': SearchDatastore_Task, 'duration_secs': 0.011463} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.336732] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9481c75e-58b0-4a3e-82fd-d7bd8669337a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.343784] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 657.343784] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222979e-6025-b762-1b2e-309a7e482687" [ 657.343784] env[61545]: _type = "Task" [ 657.343784] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.357255] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222979e-6025-b762-1b2e-309a7e482687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.470217] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.605244] env[61545]: DEBUG nova.scheduler.client.report [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.621469] env[61545]: DEBUG oslo_concurrency.lockutils [req-08c4767e-9165-4294-9a77-268bd2eb7d7e req-3c7e4411-c896-4411-a4e1-fc886e08ae92 service nova] Releasing lock "refresh_cache-1a551e66-1b98-44fd-ad16-c20113d9b1a6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.814452] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Successfully created port: d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.816245] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255308, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.821226] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 657.857774] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222979e-6025-b762-1b2e-309a7e482687, 'name': SearchDatastore_Task, 'duration_secs': 0.011551} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.858061] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.858541] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1a551e66-1b98-44fd-ad16-c20113d9b1a6/1a551e66-1b98-44fd-ad16-c20113d9b1a6.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.858663] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.858795] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.859047] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c817396-a72d-433a-bb47-007c6b2f179c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.866680] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.866886] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 657.867136] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.867433] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.867781] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.867855] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.868106] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.868331] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.868549] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.868764] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.869066] env[61545]: DEBUG nova.virt.hardware [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.869441] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d87812b-8cfd-4fb2-99f4-31e549b8d762 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.873968] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7e378e-5f38-4bff-8987-e799467e13cc {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.888078] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.888671] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.889302] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 657.889302] env[61545]: value = "task-4255309" [ 657.889302] env[61545]: _type = "Task" [ 657.889302] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.889971] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da3fb293-6910-4594-af89-e60397d7316b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.900318] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd33dcc-1f6a-4f54-896d-b70d8c662e38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.908849] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 657.908849] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52150f5f-3dee-4915-d831-f5ff0c99e2ac" [ 657.908849] env[61545]: _type = "Task" [ 657.908849] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.926429] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.932522] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52150f5f-3dee-4915-d831-f5ff0c99e2ac, 'name': SearchDatastore_Task, 'duration_secs': 0.008893} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.933640] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88abbe44-7291-4268-800d-2ac3aafe25a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.940200] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 657.940200] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525202cc-baa5-b782-62e6-1aec30c397e8" [ 657.940200] env[61545]: _type = "Task" [ 657.940200] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.949254] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525202cc-baa5-b782-62e6-1aec30c397e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.024393] env[61545]: DEBUG nova.network.neutron [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Updating instance_info_cache with network_info: [{"id": "bd5363b5-74ac-467c-9834-a90fbaf697db", "address": "fa:16:3e:44:d3:13", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd5363b5-74", "ovs_interfaceid": "bd5363b5-74ac-467c-9834-a90fbaf697db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.114769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.115486] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 
844f01ed-4dae-4e13-9d1c-09a73f413201] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.118992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.238s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.314346] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54347} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.314799] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc/2a43ac48-cdea-48c8-b3d2-e939c69ce2dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.315104] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.315555] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b56d477-6cfc-4544-84b8-b331294e1797 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.327843] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 658.327843] env[61545]: value = "task-4255310" [ 658.327843] env[61545]: _type = "Task" [ 658.327843] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.339033] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.409601] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472792} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.409695] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1a551e66-1b98-44fd-ad16-c20113d9b1a6/1a551e66-1b98-44fd-ad16-c20113d9b1a6.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.409912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.410112] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-735beb21-a2f2-49cb-aad9-aeadf9e239b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.419014] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 658.419014] env[61545]: value = "task-4255311" [ 658.419014] env[61545]: _type = "Task" [ 658.419014] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.434036] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255311, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.457264] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525202cc-baa5-b782-62e6-1aec30c397e8, 'name': SearchDatastore_Task, 'duration_secs': 0.011471} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.457264] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.457264] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 579fb20b-083f-4227-9a13-c0f1ea36e272/579fb20b-083f-4227-9a13-c0f1ea36e272.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 658.457541] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.457729] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 658.457936] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a563616a-7c30-4bff-a45d-52e9490274bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.463190] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16c28eec-a72c-4b43-bf90-5afab98b3ae9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.466248] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Received event network-vif-plugged-72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 658.466462] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Acquiring lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.466669] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.466877] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.467034] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] No waiting events found dispatching network-vif-plugged-72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 658.467193] env[61545]: WARNING nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Received unexpected event network-vif-plugged-72abf1ec-6ac4-4654-b096-bdfb06c58f03 for instance with vm_state building and task_state spawning. [ 658.467348] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Received event network-changed-72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 658.467500] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Refreshing instance network info cache due to event network-changed-72abf1ec-6ac4-4654-b096-bdfb06c58f03. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 658.467700] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Acquiring lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.467831] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Acquired lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.467978] env[61545]: DEBUG nova.network.neutron [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Refreshing network info cache for port 72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.476893] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 658.476893] env[61545]: value = "task-4255312" [ 658.476893] env[61545]: _type = "Task" [ 658.476893] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.478671] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.478903] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.485922] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-598bcf98-bb1a-4f59-a395-6e41b6c212c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.493421] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 658.493421] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52727b8a-4c11-d94b-b1af-7b60270e0856" [ 658.493421] env[61545]: _type = "Task" [ 658.493421] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.498327] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.507911] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52727b8a-4c11-d94b-b1af-7b60270e0856, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.508744] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2216e020-4f29-485e-bd43-612a3014e4a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.514737] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 658.514737] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1a4ce-97e5-4473-11f3-355aa8f86262" [ 658.514737] env[61545]: _type = "Task" [ 658.514737] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.523845] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1a4ce-97e5-4473-11f3-355aa8f86262, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.526910] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Releasing lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.527284] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Instance network_info: |[{"id": "bd5363b5-74ac-467c-9834-a90fbaf697db", "address": "fa:16:3e:44:d3:13", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd5363b5-74", "ovs_interfaceid": "bd5363b5-74ac-467c-9834-a90fbaf697db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 658.527689] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:d3:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd5363b5-74ac-467c-9834-a90fbaf697db', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.536402] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Creating folder: Project (c6a39a7ba1754d1ea7d5a67a7ff09149). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.537038] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f7c4a61-0694-424f-b285-4227e1f1f43e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.549533] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Created folder: Project (c6a39a7ba1754d1ea7d5a67a7ff09149) in parent group-v838542. [ 658.549849] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Creating folder: Instances. Parent ref: group-v838555. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.550069] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-549ebff5-a4f7-4cfc-9f05-ef9f9ff36bdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.560957] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Created folder: Instances in parent group-v838555. [ 658.561918] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 658.561918] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 658.561918] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb0b6ed3-6bf2-4a1b-8d90-17c88c5f9c28 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.584290] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.584290] env[61545]: value = "task-4255315" [ 658.584290] env[61545]: _type = "Task" [ 658.584290] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.593595] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255315, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.623144] env[61545]: DEBUG nova.compute.utils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.629745] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.629942] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.754518] env[61545]: DEBUG nova.policy [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb00c18cd27541359ae0adf45f5c4171', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa2360863a5f4eff8a88eca0c88fa76d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.840781] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075778} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.843735] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.845304] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db931a88-f5f0-4894-8d7f-815087b1fbc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.886035] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc/2a43ac48-cdea-48c8-b3d2-e939c69ce2dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.886035] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eea03ad0-8e7d-4d16-9de4-8cfa580e1119 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.915231] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 658.915231] env[61545]: value = "task-4255316" [ 658.915231] env[61545]: _type = "Task" [ 658.915231] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.927225] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255316, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.933707] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082014} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.933999] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.934917] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0bc097-6317-4f19-830a-097cd6e068e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.968708] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 1a551e66-1b98-44fd-ad16-c20113d9b1a6/1a551e66-1b98-44fd-ad16-c20113d9b1a6.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.970023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50cf1cf5-f646-4286-b9bc-04c8b514d46d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.998126] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255312, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.999901] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 658.999901] env[61545]: value = "task-4255317" [ 658.999901] env[61545]: _type = "Task" [ 658.999901] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.027221] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1a4ce-97e5-4473-11f3-355aa8f86262, 'name': SearchDatastore_Task, 'duration_secs': 0.010702} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.027574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.027875] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 256e48c1-81de-4d32-97dc-ba80541a9239/256e48c1-81de-4d32-97dc-ba80541a9239.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.028214] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eca43ef-2af0-4bb2-b990-f304025d9e53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.035953] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 659.035953] env[61545]: value = "task-4255318" [ 659.035953] env[61545]: _type = "Task" [ 659.035953] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.045961] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255318, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.097554] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255315, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.133684] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.167916] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.168115] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1a551e66-1b98-44fd-ad16-c20113d9b1a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.168344] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 256e48c1-81de-4d32-97dc-ba80541a9239 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.168476] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 579fb20b-083f-4227-9a13-c0f1ea36e272 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.168646] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5d9eadff-7f13-4720-8119-5829b4802c21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.170721] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance eced4107-b99e-479e-b22c-2157320ecf95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.170721] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 844f01ed-4dae-4e13-9d1c-09a73f413201 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.364720] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.365212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.434228] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255316, 'name': ReconfigVM_Task, 'duration_secs': 0.458888} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.434582] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc/2a43ac48-cdea-48c8-b3d2-e939c69ce2dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.435453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7301912-c1f3-4062-a9d3-cc138a2578d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.445273] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 659.445273] env[61545]: value = "task-4255319" [ 659.445273] env[61545]: _type = "Task" [ 659.445273] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.462068] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255319, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.499121] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539586} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.501527] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 579fb20b-083f-4227-9a13-c0f1ea36e272/579fb20b-083f-4227-9a13-c0f1ea36e272.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.501527] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.501527] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1696cbc0-fd0e-40cd-b0b0-cd17f3de61e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.513328] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.515111] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 659.515111] env[61545]: value = "task-4255320" [ 659.515111] env[61545]: _type = "Task" [ 659.515111] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.525521] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.551405] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255318, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.598608] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255315, 'name': CreateVM_Task, 'duration_secs': 0.661471} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.599298] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.599959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.600239] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.600727] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 659.602198] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6b2821a-ad93-480e-ab8c-2d8c84d5dcd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.611856] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 659.611856] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5281b341-fddf-b8fd-9fef-18ed47fef7a8" [ 659.611856] env[61545]: _type = "Task" [ 659.611856] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.623281] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5281b341-fddf-b8fd-9fef-18ed47fef7a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.650538] env[61545]: DEBUG nova.network.neutron [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Updated VIF entry in instance network info cache for port 72abf1ec-6ac4-4654-b096-bdfb06c58f03. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.650974] env[61545]: DEBUG nova.network.neutron [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Updating instance_info_cache with network_info: [{"id": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "address": "fa:16:3e:d6:18:cb", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72abf1ec-6a", "ovs_interfaceid": "72abf1ec-6ac4-4654-b096-bdfb06c58f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.674974] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4f879b20-bae0-4d50-b5e9-378356341962 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.768025] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Successfully created port: a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.958077] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255319, 'name': Rename_Task, 'duration_secs': 0.247102} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.958482] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 659.958803] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0e5f29f-b286-412c-ac87-bc426f96c81c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.967754] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 659.967754] env[61545]: value = "task-4255321" [ 659.967754] env[61545]: _type = "Task" [ 659.967754] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.982536] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.013097] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255317, 'name': ReconfigVM_Task, 'duration_secs': 0.7719} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.013097] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 1a551e66-1b98-44fd-ad16-c20113d9b1a6/1a551e66-1b98-44fd-ad16-c20113d9b1a6.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.013097] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-810ea221-0101-44c7-b257-857b47e016f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.020367] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 660.020367] env[61545]: value = "task-4255322" [ 660.020367] env[61545]: _type = "Task" [ 660.020367] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.027260] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102841} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.027922] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.028731] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee29e199-d87f-4a56-8050-443380f37c07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.034745] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255322, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.056672] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 579fb20b-083f-4227-9a13-c0f1ea36e272/579fb20b-083f-4227-9a13-c0f1ea36e272.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.061246] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09dfce65-d129-4cbe-ad1f-2622bf771a82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.087720] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60419} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.089708] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 256e48c1-81de-4d32-97dc-ba80541a9239/256e48c1-81de-4d32-97dc-ba80541a9239.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 660.089708] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 660.089973] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 660.089973] env[61545]: value = "task-4255323" [ 660.089973] env[61545]: _type = "Task" [ 660.089973] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.089973] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-711357b5-5ff0-4cce-9554-855091a5d061 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.104920] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255323, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.108256] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 660.108256] env[61545]: value = "task-4255324" [ 660.108256] env[61545]: _type = "Task" [ 660.108256] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.128666] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.128982] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5281b341-fddf-b8fd-9fef-18ed47fef7a8, 'name': SearchDatastore_Task, 'duration_secs': 0.020064} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.129318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.129590] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.129861] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.130058] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.130319] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.130568] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aebb184e-5588-4317-b00f-d2784737fdbc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.143420] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.143530] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.144269] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce22c5a8-47c5-4572-b804-2a2a60311201 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.151787] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 660.151787] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a2b157-e566-ef2d-04f3-41ecefe982d5" [ 660.151787] env[61545]: _type = "Task" [ 660.151787] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.158858] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Releasing lock "refresh_cache-256e48c1-81de-4d32-97dc-ba80541a9239" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.159141] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Received event network-vif-plugged-bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 660.159343] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Acquiring lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.159553] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.159713] env[61545]: DEBUG oslo_concurrency.lockutils [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] Lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.159875] env[61545]: DEBUG nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] No waiting events found dispatching network-vif-plugged-bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.160057] env[61545]: WARNING nova.compute.manager [req-576ebbfa-e3b2-461b-82ba-2efaf3f923fd req-0916fd0d-2519-4821-853a-72a86b453ff0 service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Received unexpected event network-vif-plugged-bd5363b5-74ac-467c-9834-a90fbaf697db for instance with vm_state building and task_state spawning. 
[ 660.161265] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.170528] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a2b157-e566-ef2d-04f3-41ecefe982d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.183099] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8a3ac91d-8949-4745-9161-1a70899c0293 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.203903] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.203903] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.203903] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.204190] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.204190] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.204190] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.205827] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.205827] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.205827] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.205827] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.205827] env[61545]: DEBUG nova.virt.hardware [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.206158] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b38d7d-a504-47be-87a6-41a6cebb1a9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.221565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ab5951-9d8a-4980-a0f3-9784be846218 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.483612] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255321, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.533695] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255322, 'name': Rename_Task, 'duration_secs': 0.176273} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.533897] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 660.534791] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae6929f2-e201-4743-b587-72dc8e655dbb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.538163] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Successfully updated port: d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 660.542997] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 660.542997] env[61545]: value = "task-4255325" [ 660.542997] env[61545]: _type = "Task" [ 660.542997] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.553504] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255325, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.603377] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255323, 'name': ReconfigVM_Task, 'duration_secs': 0.315758} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.603377] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 579fb20b-083f-4227-9a13-c0f1ea36e272/579fb20b-083f-4227-9a13-c0f1ea36e272.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.604048] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b53f8783-a6ca-4555-9c2e-352dfffe86e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.615021] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 660.615021] env[61545]: value = "task-4255326" [ 660.615021] env[61545]: _type = "Task" [ 660.615021] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.623546] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097092} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.624528] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.625639] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7795527b-9201-473e-aafc-b26d884faa50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.634594] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255326, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.658478] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 256e48c1-81de-4d32-97dc-ba80541a9239/256e48c1-81de-4d32-97dc-ba80541a9239.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.658478] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa0a4b67-f65c-4bf5-a450-be10889bc9fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.683166] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a2b157-e566-ef2d-04f3-41ecefe982d5, 'name': SearchDatastore_Task, 'duration_secs': 0.02843} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.685530] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 660.685530] env[61545]: value = "task-4255327" [ 660.685530] env[61545]: _type = "Task" [ 660.685530] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.685861] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f378bf7-fade-4652-8815-7d3c80a469bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.688942] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5719daa8-a5bc-4604-b465-a57097695c6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.699210] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 660.699210] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527b0e41-fcc1-360c-9ada-51fdc49da23d" [ 660.699210] env[61545]: _type = "Task" [ 660.699210] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.702679] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255327, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.713631] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527b0e41-fcc1-360c-9ada-51fdc49da23d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.984106] env[61545]: DEBUG oslo_vmware.api [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255321, 'name': PowerOnVM_Task, 'duration_secs': 0.716348} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.984578] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 660.985253] env[61545]: INFO nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Took 14.53 seconds to spawn the instance on the hypervisor. 
[ 660.985709] env[61545]: DEBUG nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 660.986820] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6024a1ad-2750-4fd4-b9bf-cddd6b206cff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.040841] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.041014] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.041200] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.062923] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255325, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.128388] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255326, 'name': Rename_Task, 'duration_secs': 0.185179} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.128682] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.128945] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22ecc9ed-4f40-4c28-9546-03ebfb9e342f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.138506] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 661.138506] env[61545]: value = "task-4255328" [ 661.138506] env[61545]: _type = "Task" [ 661.138506] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.154393] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.195691] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 0554c462-1dc5-4043-94ac-7a3d28ed05e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.203990] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255327, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.221201] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527b0e41-fcc1-360c-9ada-51fdc49da23d, 'name': SearchDatastore_Task, 'duration_secs': 0.014478} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.221368] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.221625] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5d9eadff-7f13-4720-8119-5829b4802c21/5d9eadff-7f13-4720-8119-5829b4802c21.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.221960] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a28b698-04dd-411b-9368-d814713f4807 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.232186] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 661.232186] env[61545]: value = "task-4255329" [ 661.232186] env[61545]: _type = "Task" [ 661.232186] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.243351] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.513852] env[61545]: INFO nova.compute.manager [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Took 19.38 seconds to build instance. [ 661.563129] env[61545]: DEBUG oslo_vmware.api [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255325, 'name': PowerOnVM_Task, 'duration_secs': 0.521294} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.563129] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.563723] env[61545]: INFO nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Took 12.85 seconds to spawn the instance on the hypervisor. [ 661.563723] env[61545]: DEBUG nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.565287] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4be0ca9-2b25-4f20-9daf-5ec0e1fa16ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.605195] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.655226] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255328, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.662848] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.663059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.698723] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.698723] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 661.698723] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=250GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 661.706079] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255327, 'name': ReconfigVM_Task, 'duration_secs': 0.639229} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.706079] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 256e48c1-81de-4d32-97dc-ba80541a9239/256e48c1-81de-4d32-97dc-ba80541a9239.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.706079] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a644a60d-d122-407a-a453-c90e60f22f9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.715817] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 661.715817] env[61545]: value = "task-4255330" [ 661.715817] env[61545]: _type = "Task" [ 661.715817] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.726300] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255330, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.748981] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255329, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.978615] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd3b980-982e-4d3c-b05f-d0147f890698 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.988188] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7960f0d1-715c-456e-9d30-43eb42fe5654 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.021674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7263cf7f-cc8e-4e5a-95a4-892f85182895 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.892s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.023345] env[61545]: DEBUG nova.network.neutron [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.026353] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03937cbb-5ac4-4172-9b1f-9066d9165625 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.036553] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434491ac-be1c-4f2d-a8c6-33f77bf4af31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.058081] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.096132] env[61545]: INFO nova.compute.manager [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Took 19.01 seconds to build instance. [ 662.153540] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255328, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.229143] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255330, 'name': Rename_Task, 'duration_secs': 0.235973} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.229495] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.229790] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-705ab41d-29cb-4e38-8ed6-76ffc5dcb4b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.238406] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 662.238406] env[61545]: value = "task-4255331" [ 662.238406] env[61545]: _type = "Task" [ 662.238406] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.249643] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575388} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.253338] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5d9eadff-7f13-4720-8119-5829b4802c21/5d9eadff-7f13-4720-8119-5829b4802c21.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.253666] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.254163] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.254389] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d38c7c34-2c30-4a17-941e-cfcbb13de828 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.262244] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 662.262244] env[61545]: value = "task-4255332" [ 662.262244] env[61545]: _type = "Task" [ 662.262244] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.276502] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255332, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.454825] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Successfully updated port: a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.530189] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.530619] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Instance network_info: |[{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 662.531132] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 662.536734] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:b2:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd33a6a2d-6310-4263-adf4-dcf09ce72a6b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.546477] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Creating folder: Project (f56d2d605ffd4d098959105ab53d9803). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.547418] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76eb8abe-c830-4f86-825e-123c018de95f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.565553] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Created folder: Project (f56d2d605ffd4d098959105ab53d9803) in parent group-v838542. [ 662.565553] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Creating folder: Instances. Parent ref: group-v838558. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.565889] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc1bcbfa-6539-4694-a378-cb1d5e45dc4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.577758] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Created folder: Instances in parent group-v838558. [ 662.578021] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 662.578289] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.578520] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67661e82-3260-45b6-a135-f8a3ef6e8f88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.597207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9c21f804-cfd2-463e-85e3-8add98f23782 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.525s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.603688] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.603688] env[61545]: value = "task-4255335" [ 662.603688] env[61545]: _type = "Task" [ 662.603688] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.604709] env[61545]: ERROR nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [req-cf094164-895b-4620-ac3c-c3b02b569400] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cf094164-895b-4620-ac3c-c3b02b569400"}]} [ 662.624909] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255335, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.633650] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 662.652184] env[61545]: DEBUG oslo_vmware.api [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255328, 'name': PowerOnVM_Task, 'duration_secs': 1.330537} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.652495] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.652708] env[61545]: INFO nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Took 7.10 seconds to spawn the instance on the hypervisor. [ 662.652957] env[61545]: DEBUG nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.654472] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 662.654472] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 18 to 19 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.654689] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 247, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.657603] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284a779c-72cb-4156-9711-c471029406c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.679694] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 662.708837] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Refreshing trait associations for resource 
provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 662.751490] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255331, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.775132] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076659} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.781021] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.784236] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e357a46-582b-4798-8a07-b874bbbbcd8f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.787442] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "b2579785-d1a4-48da-ba27-6ee3098578f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.787694] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.789069] env[61545]: DEBUG nova.compute.manager [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Received event network-changed-bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 662.789255] env[61545]: DEBUG nova.compute.manager [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Refreshing instance network info cache due to event network-changed-bd5363b5-74ac-467c-9834-a90fbaf697db. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 662.789443] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Acquiring lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.789583] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Acquired lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.789738] env[61545]: DEBUG nova.network.neutron [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Refreshing network info cache for port bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.814476] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 5d9eadff-7f13-4720-8119-5829b4802c21/5d9eadff-7f13-4720-8119-5829b4802c21.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.818221] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f266902-5461-40aa-aedf-d2ef225b4c98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.842447] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 662.842447] env[61545]: value = "task-4255336" [ 662.842447] env[61545]: _type = "Task" [ 662.842447] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.859047] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255336, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.957642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.957642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.958307] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.058155] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcf540b-a611-4255-a79b-15a838b76fd9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.068924] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "8214216a-0256-467e-ac4c-1d14b0f73b77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.069185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.070450] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.074641] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1d74a9-93ef-466c-804c-c1a7fd17213d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.107116] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.115305] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622671c4-d913-4258-b646-d9ecc338c977 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.127631] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23edcba-9f64-42dc-a4fa-13eaa645382b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.131195] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255335, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.144281] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 663.177802] env[61545]: INFO nova.compute.manager [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Took 18.39 seconds to build instance. [ 663.252101] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255331, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.356088] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255336, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.540084] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.627416] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255335, 'name': CreateVM_Task, 'duration_secs': 0.790695} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.627516] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.628154] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.628321] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.628705] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.631427] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-564b72b3-099e-487a-82a0-0970f4f39a44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.637884] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 663.637884] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525eacc5-39d4-3608-6d00-771816fe58b5" [ 663.637884] env[61545]: _type = "Task" [ 663.637884] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.638892] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.652381] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525eacc5-39d4-3608-6d00-771816fe58b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.678235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fbaf744f-02d8-4498-a86f-6c694189482c tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.898s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.694269] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 19 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 663.694269] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 19 to 20 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 663.694269] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 663.759899] env[61545]: DEBUG oslo_vmware.api [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255331, 'name': PowerOnVM_Task, 'duration_secs': 1.125576} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.763045] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.763045] env[61545]: INFO nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Took 12.77 seconds to spawn the instance on the hypervisor. 
[ 663.763045] env[61545]: DEBUG nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.763045] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998b52d7-7b53-4293-af4c-1232c74d731f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.841481] env[61545]: DEBUG nova.network.neutron [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Updated VIF entry in instance network info cache for port bd5363b5-74ac-467c-9834-a90fbaf697db. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 663.841481] env[61545]: DEBUG nova.network.neutron [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Updating instance_info_cache with network_info: [{"id": "bd5363b5-74ac-467c-9834-a90fbaf697db", "address": "fa:16:3e:44:d3:13", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd5363b5-74", "ovs_interfaceid": "bd5363b5-74ac-467c-9834-a90fbaf697db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.854051] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255336, 'name': ReconfigVM_Task, 'duration_secs': 0.63588} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.854522] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 5d9eadff-7f13-4720-8119-5829b4802c21/5d9eadff-7f13-4720-8119-5829b4802c21.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.855581] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9751b7a7-8cba-4118-adba-fd91add3dd64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.863585] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 663.863585] env[61545]: value = "task-4255337" [ 663.863585] env[61545]: _type = "Task" [ 663.863585] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.872558] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255337, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.898942] env[61545]: DEBUG nova.network.neutron [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.150496] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525eacc5-39d4-3608-6d00-771816fe58b5, 
'name': SearchDatastore_Task, 'duration_secs': 0.015312} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.150824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.152707] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.152707] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.152707] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.152707] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.152707] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0032bc64-4e9f-4b8e-8c28-7aae4dd6943b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.162289] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.162465] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.163377] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb636201-3246-4ade-9877-56ae24a6edb5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.171096] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 664.171096] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695939-8c20-0533-d3ff-e6bb1cc4cd62" [ 664.171096] env[61545]: _type = "Task" [ 664.171096] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.180946] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695939-8c20-0533-d3ff-e6bb1cc4cd62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.184832] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.198107] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 664.198328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.080s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.198586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.633s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.208793] env[61545]: INFO nova.compute.claims [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.297424] env[61545]: INFO nova.compute.manager [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Took 20.04 seconds to build instance. 
[ 664.345990] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Releasing lock "refresh_cache-5d9eadff-7f13-4720-8119-5829b4802c21" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.346343] env[61545]: DEBUG nova.compute.manager [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Received event network-vif-plugged-d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 664.346563] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.346811] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.347040] env[61545]: DEBUG oslo_concurrency.lockutils [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.347255] env[61545]: DEBUG nova.compute.manager [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] No waiting events found dispatching network-vif-plugged-d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.347476] env[61545]: WARNING nova.compute.manager [req-36aa72dc-9e74-46e8-8a8e-5bf0f05c900a req-3746a827-9413-45be-8b85-ab4025203c5a service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Received unexpected event network-vif-plugged-d33a6a2d-6310-4263-adf4-dcf09ce72a6b for instance with vm_state building and task_state spawning. [ 664.375351] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255337, 'name': Rename_Task, 'duration_secs': 0.309218} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.375630] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 664.375842] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fcb02b3-0582-45c7-b620-62ed5c1f395e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.383288] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 664.383288] env[61545]: value = "task-4255338" [ 664.383288] env[61545]: _type = "Task" [ 664.383288] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.392650] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.402427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.402793] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Instance network_info: |[{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.403343] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:8a:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.412046] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating folder: Project (aa2360863a5f4eff8a88eca0c88fa76d). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.412419] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb567c10-1999-4fe2-9d40-5eb5930be94d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.424183] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created folder: Project (aa2360863a5f4eff8a88eca0c88fa76d) in parent group-v838542. [ 664.424424] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating folder: Instances. Parent ref: group-v838561. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.424703] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-558d104d-0530-4157-80e4-b8bf0fbef036 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.436054] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created folder: Instances in parent group-v838561. [ 664.436054] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.436225] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.436366] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d127fe84-d81f-4d5b-9685-f5fa3d075bde {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.461585] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.461585] env[61545]: value = "task-4255341" [ 664.461585] env[61545]: _type = "Task" [ 664.461585] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.470287] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255341, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.684701] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695939-8c20-0533-d3ff-e6bb1cc4cd62, 'name': SearchDatastore_Task, 'duration_secs': 0.021294} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.686038] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38691558-3d7a-41e0-8c63-311456da2160 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.696732] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 664.696732] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c00f2-3f39-0c04-210f-959e8615a0f3" [ 664.696732] env[61545]: _type = "Task" [ 664.696732] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.708990] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c00f2-3f39-0c04-210f-959e8615a0f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.714044] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.781922] env[61545]: DEBUG nova.compute.manager [None req-4fdfb074-f8cd-4422-9766-adc289a50155 tempest-ServerDiagnosticsV248Test-1216446773 tempest-ServerDiagnosticsV248Test-1216446773-project-admin] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.784509] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b383b5-0078-4b5e-988b-87ae4e953ea1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.796961] env[61545]: INFO nova.compute.manager [None req-4fdfb074-f8cd-4422-9766-adc289a50155 tempest-ServerDiagnosticsV248Test-1216446773 tempest-ServerDiagnosticsV248Test-1216446773-project-admin] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Retrieving diagnostics [ 664.798960] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bdb07d-a51e-4a1b-8989-90501a6987fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.801435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Acquiring lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.801435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.801580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Acquiring lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.801685] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 664.801853] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.803554] env[61545]: DEBUG oslo_concurrency.lockutils [None req-79cde313-821e-447d-bf08-8bc0cc30fc82 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.560s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.804038] env[61545]: INFO nova.compute.manager [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Terminating instance [ 664.895582] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255338, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.971560] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255341, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.134564] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.135518] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.209316] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c00f2-3f39-0c04-210f-959e8615a0f3, 'name': SearchDatastore_Task, 'duration_secs': 0.013734} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.209625] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.210122] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 665.210561] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a03707bc-dfdb-4dae-9b91-848ee2c33e3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.223848] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 665.223848] env[61545]: value = "task-4255342" [ 665.223848] env[61545]: _type = "Task" [ 665.223848] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.237300] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.317019] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 665.318848] env[61545]: DEBUG nova.compute.manager [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 665.319082] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.321538] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaa508f-b0dd-4dea-9fc9-14888b36ab80 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.337499] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.338252] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c33b7dba-fcad-47d9-9c04-486d8479dfb3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.347763] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Waiting for the task: (returnval){ [ 665.347763] env[61545]: value = "task-4255343" [ 665.347763] env[61545]: _type = "Task" [ 665.347763] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.361572] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Task: {'id': task-4255343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.402560] env[61545]: DEBUG oslo_vmware.api [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255338, 'name': PowerOnVM_Task, 'duration_secs': 0.941906} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.403260] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.403581] env[61545]: INFO nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Took 12.14 seconds to spawn the instance on the hypervisor. 
[ 665.403761] env[61545]: DEBUG nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.404669] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f893ca03-d44c-467f-bbd4-1490d0d19c15 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.478893] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255341, 'name': CreateVM_Task, 'duration_secs': 0.604928} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.479108] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.480284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.480284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.480480] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.483733] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c993a9e2-e496-4ea1-9d92-1a467cd96585 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.490637] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 665.490637] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523b49d8-ce1e-1900-87e7-2c3b8722fa14" [ 665.490637] env[61545]: _type = "Task" [ 665.490637] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.502147] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523b49d8-ce1e-1900-87e7-2c3b8722fa14, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.597702] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69399bc-b4c3-4c40-8aec-97433164a6fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.609960] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6d4692-e559-4d82-a7e0-293f8ed28a8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.649844] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97532e46-1364-4e5b-9601-588856670855 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.661268] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5cc197-ad7c-468c-816b-3ee08c56974d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.681198] env[61545]: DEBUG nova.compute.provider_tree [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 665.735044] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255342, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.843106] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.862323] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Task: {'id': task-4255343, 'name': PowerOffVM_Task, 'duration_secs': 0.35314} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.862713] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.862839] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 665.863126] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-750fddb1-c136-48e4-8940-6dc560c24918 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.934928] env[61545]: INFO nova.compute.manager [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Took 21.17 seconds to build instance. [ 665.938153] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 665.939225] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 665.939225] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Deleting the datastore file [datastore2] 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.939381] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8d6c922-69f3-4eda-8f7f-e5108e97317e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.949434] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Waiting for the task: (returnval){ [ 665.949434] env[61545]: value = "task-4255345" [ 665.949434] env[61545]: _type = "Task" [ 665.949434] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.962761] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Task: {'id': task-4255345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.002917] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523b49d8-ce1e-1900-87e7-2c3b8722fa14, 'name': SearchDatastore_Task, 'duration_secs': 0.0715} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.003359] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.003480] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.003744] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.003862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.004055] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.004334] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2721d98-0c66-4702-8de5-5e65fd28be83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.014539] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.014747] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 666.015635] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca7cb3dd-961d-4c34-b002-dd79d8313c0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.024060] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 666.024060] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5221d2c5-2dee-9a15-ba78-a7b4e240a374" [ 666.024060] env[61545]: _type = "Task" [ 666.024060] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.035530] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5221d2c5-2dee-9a15-ba78-a7b4e240a374, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.212288] env[61545]: ERROR nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [req-94316318-37d0-49b2-b2ca-0f34d94e6bc0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-94316318-37d0-49b2-b2ca-0f34d94e6bc0"}]} [ 666.234757] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663307} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.235541] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 666.235541] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.235746] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81eb5003-bb7e-4e0e-889f-c1de96feac5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.243466] env[61545]: DEBUG nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 666.246936] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 666.246936] env[61545]: value = "task-4255346" [ 666.246936] env[61545]: _type = "Task" [ 666.246936] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.259663] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.267455] env[61545]: DEBUG nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 666.267714] env[61545]: DEBUG nova.compute.provider_tree [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 666.289903] env[61545]: DEBUG nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 666.313519] env[61545]: DEBUG nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 666.440477] env[61545]: DEBUG oslo_concurrency.lockutils [None req-855b2950-aa73-48c8-8ea2-29a19a4c0826 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.686s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.463225] env[61545]: DEBUG oslo_vmware.api [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Task: {'id': task-4255345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240849} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.464028] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.464028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.464028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.464028] env[61545]: INFO nova.compute.manager [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 666.464205] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.464675] env[61545]: DEBUG nova.compute.manager [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 666.464675] env[61545]: DEBUG nova.network.neutron [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.538622] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5221d2c5-2dee-9a15-ba78-a7b4e240a374, 'name': SearchDatastore_Task, 'duration_secs': 0.011823} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.545442] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-642ea720-725e-423b-b201-748ada7e3da6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.553338] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 666.553338] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e183b4-de81-07d6-c3f1-567087421f6f" [ 666.553338] env[61545]: _type = "Task" [ 666.553338] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.571096] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e183b4-de81-07d6-c3f1-567087421f6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.580692] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ddafbb-9c5e-46ed-a155-a5daa76956b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.589061] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ef7603-b3e5-4e74-8416-3e8c4217556d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.622957] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcad7809-7636-4488-a651-143754455dae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.631740] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd18ba1-4ae8-4ade-9f8e-54090e9cfa8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.649123] env[61545]: DEBUG nova.compute.provider_tree [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.758653] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114231} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.759056] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.759841] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8ec96e-72c1-49e9-9f10-d91a4dd523eb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.784588] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.784905] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-111d415d-9c85-43a9-826a-572c92e42d4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.807901] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 666.807901] env[61545]: value = "task-4255347" [ 666.807901] env[61545]: _type = "Task" [ 666.807901] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.816979] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.942943] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 667.065735] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e183b4-de81-07d6-c3f1-567087421f6f, 'name': SearchDatastore_Task, 'duration_secs': 0.036123} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.065735] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.065846] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.066332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c55c5ec-a73f-4cbd-b264-0e5281420508 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.075269] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 667.075269] env[61545]: value = "task-4255348" [ 667.075269] env[61545]: _type = "Task" [ 667.075269] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.084494] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.154888] env[61545]: DEBUG nova.scheduler.client.report [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.321863] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.471350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.482878] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Received event network-changed-d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 667.483290] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Refreshing instance network info cache due to event network-changed-d33a6a2d-6310-4263-adf4-dcf09ce72a6b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 667.483545] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Acquiring lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.483740] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Acquired lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.483988] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Refreshing network info cache for port d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.528684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.528963] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.529403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.532330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.532575] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.535320] env[61545]: INFO nova.compute.manager [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Terminating instance [ 667.587339] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255348, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.660956] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.462s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.661602] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.665126] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.003s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.666750] env[61545]: INFO nova.compute.claims [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.706337] env[61545]: DEBUG nova.network.neutron [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.822643] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.042381] env[61545]: DEBUG nova.compute.manager [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.042381] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.043748] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91b065f-cb39-442f-a9a7-cf3b26e2a74a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.055355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.055612] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a29e3ac0-f29a-4d0c-9ee2-96073ac185c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.064257] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 668.064257] env[61545]: value = "task-4255349" [ 668.064257] env[61545]: _type = "Task" [ 668.064257] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.078474] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.089739] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790569} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.090112] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.090827] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.091183] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85d0b14d-c557-48d4-a659-fd7cbf274995 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.100117] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 668.100117] env[61545]: value = "task-4255350" [ 668.100117] env[61545]: _type = "Task" [ 668.100117] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.112415] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255350, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.172456] env[61545]: DEBUG nova.compute.utils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.179882] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.180062] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.208387] env[61545]: INFO nova.compute.manager [-] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Took 1.74 seconds to deallocate network for instance. [ 668.311607] env[61545]: DEBUG nova.policy [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6252bf3af7f4c4a9464791d884de264', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e59cf3bff474453a38297e54e84ec89', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.324319] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.577612] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255349, 'name': PowerOffVM_Task, 'duration_secs': 0.223249} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.577935] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.578128] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.578877] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba2434c5-75e2-4a83-b58a-cad0b1e68ab4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.612034] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255350, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089459} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.612240] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.613221] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8107ca24-7264-4624-97f0-03e3de220e10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.639462] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.640069] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a134a0e-c24c-4a0d-bd7f-de678257a9f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.661640] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.662513] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.662513] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Deleting the datastore file [datastore2] 1a551e66-1b98-44fd-ad16-c20113d9b1a6 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.662949] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-307a4977-7913-45b8-8bec-3a023527f41e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.669341] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 668.669341] env[61545]: value = "task-4255352" [ 668.669341] env[61545]: _type = "Task" [ 668.669341] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.671937] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for the task: (returnval){ [ 668.671937] env[61545]: value = "task-4255353" [ 668.671937] env[61545]: _type = "Task" [ 668.671937] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.683493] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.698292] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.698292] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255352, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.717660] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.769045] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updated VIF entry in instance network info cache for port d33a6a2d-6310-4263-adf4-dcf09ce72a6b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.769045] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.824523] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.972585] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c887589-94d5-467b-b850-6030952b18d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.980539] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3b5124-3fad-4543-b7b6-914df74e246f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.015008] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f2ec1c-0957-49cb-b991-b3e248fbd857 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.023597] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27673007-ea56-4492-83bb-12ecadbf0623 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.038129] env[61545]: DEBUG nova.compute.provider_tree [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.040145] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Successfully created port: 4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.187984] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255352, 'name': ReconfigVM_Task, 'duration_secs': 0.297509} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.191230] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.192409] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.192642] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20072578-f336-4aaf-8e58-5bf42a0af55f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.205153] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 669.205153] env[61545]: value = "task-4255354" [ 669.205153] env[61545]: _type = "Task" [ 669.205153] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.214043] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255354, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.270527] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Releasing lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.270809] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Received event network-vif-plugged-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 669.271019] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.271256] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.271420] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.271817] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] No waiting events found dispatching network-vif-plugged-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 669.272266] env[61545]: WARNING nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 
req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Received unexpected event network-vif-plugged-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 for instance with vm_state building and task_state spawning. [ 669.272266] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Received event network-changed-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 669.272705] env[61545]: DEBUG nova.compute.manager [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Refreshing instance network info cache due to event network-changed-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 669.272832] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.273248] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.273737] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Refreshing network info cache for port a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.291871] env[61545]: DEBUG nova.compute.manager [None req-6fc78d7b-1cdb-4c82-a472-a03c34d5a4f5 tempest-ServerDiagnosticsTest-1920106125 tempest-ServerDiagnosticsTest-1920106125-project-admin] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.293754] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a371a5-272d-4b3c-aa43-2c5060a24c03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.303126] env[61545]: INFO nova.compute.manager [None req-6fc78d7b-1cdb-4c82-a472-a03c34d5a4f5 tempest-ServerDiagnosticsTest-1920106125 tempest-ServerDiagnosticsTest-1920106125-project-admin] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Retrieving diagnostics [ 669.304296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f8c64-2f8c-4b53-80e3-41d9713281aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.360610] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255347, 'name': ReconfigVM_Task, 'duration_secs': 2.100527} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.360610] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfigured VM instance instance-00000006 to attach disk [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.361189] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24625f31-edca-4899-a457-060ebb50002f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.369789] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 669.369789] env[61545]: value = "task-4255355" [ 669.369789] env[61545]: _type = "Task" [ 669.369789] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.384494] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255355, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.543738] env[61545]: DEBUG nova.scheduler.client.report [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.689193] env[61545]: DEBUG oslo_vmware.api [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Task: {'id': task-4255353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.559318} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.689193] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.689193] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.689193] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.689193] env[61545]: INFO nova.compute.manager [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Took 1.65 seconds to destroy the instance on the hypervisor. [ 669.689370] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.689370] env[61545]: DEBUG nova.compute.manager [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.689370] env[61545]: DEBUG nova.network.neutron [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.710590] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.718673] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255354, 'name': Rename_Task, 'duration_secs': 0.155591} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.719186] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.719346] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b700d78e-e0b0-4c2a-87ee-5a671d7ddec2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.726028] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 669.726028] env[61545]: value = "task-4255356" [ 669.726028] env[61545]: _type = "Task" [ 669.726028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.738535] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.741402] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.741692] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.741928] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.742201] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
669.742614] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.742801] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.743187] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.743422] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.743633] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.743826] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.744036] env[61545]: DEBUG nova.virt.hardware [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.745711] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd32905-24f9-4bcc-a9b7-c563de6990a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.753655] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc751841-d220-4561-b703-8e03845d4e50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.881416] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255355, 'name': Rename_Task, 'duration_secs': 0.344} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.881416] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.881416] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8707ba4-bd1b-42b9-9b31-59e77b22996b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.890297] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 669.890297] env[61545]: value = "task-4255357" [ 669.890297] env[61545]: _type = "Task" [ 669.890297] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.899545] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.049979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.051383] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.054585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.137s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.056359] env[61545]: INFO nova.compute.claims [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.239212] env[61545]: DEBUG oslo_vmware.api [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255356, 'name': PowerOnVM_Task, 'duration_secs': 0.49844} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.241328] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.241328] env[61545]: INFO nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Took 10.08 seconds to spawn the instance on the hypervisor. [ 670.241644] env[61545]: DEBUG nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.243288] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee46cf6-23e7-49d2-857c-0d55441a1433 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.405299] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255357, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.424125] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updated VIF entry in instance network info cache for port a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.424503] env[61545]: DEBUG nova.network.neutron [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.565030] env[61545]: DEBUG nova.compute.utils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.570475] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.570833] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.592308] env[61545]: DEBUG nova.compute.manager [req-26c2923d-6332-4200-9a0f-07ec3568cdbe req-46f0ebb9-89f2-49dc-aaad-851740369149 service nova] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Received event network-vif-deleted-d548df43-1a40-42e8-88e2-6487812a1612 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 670.764128] env[61545]: DEBUG nova.policy [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87ecc6c8dd334b988b6ca501152d2829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '904a34121aff449db58eaa92ccfbe556', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 670.775856] env[61545]: INFO nova.compute.manager [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Took 16.24 seconds to build instance. [ 670.884268] env[61545]: DEBUG nova.network.neutron [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.907328] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255357, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.927942] env[61545]: DEBUG oslo_concurrency.lockutils [req-cf92f273-6ec9-44b5-966f-65cdcb924503 req-667ce358-f65d-4656-8f97-5abc61321a07 service nova] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.070940] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.284396] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42f423b9-9999-4947-8ee0-e75534250289 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.775s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.388372] env[61545]: INFO nova.compute.manager [-] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Took 1.70 seconds to deallocate network for instance. [ 671.403385] env[61545]: DEBUG oslo_vmware.api [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255357, 'name': PowerOnVM_Task, 'duration_secs': 1.451958} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.406432] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.406644] env[61545]: INFO nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Took 13.59 seconds to spawn the instance on the hypervisor. [ 671.406973] env[61545]: DEBUG nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.412660] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab425f4-83bc-4f7c-a475-a92a24a48f75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.431625] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b08a90-458f-4ecb-9baf-9e4f5d7774ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.446804] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abd4d90-69ed-4752-8d85-8f9d4ffc6684 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.490804] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8303f7a9-eabb-4b32-9575-89d0a2edb10d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.490804] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "5d9eadff-7f13-4720-8119-5829b4802c21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.491254] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.491427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.492634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.492634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.495251] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Successfully updated port: 4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.499012] env[61545]: INFO nova.compute.manager [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Terminating instance [ 671.506190] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4aa886-0ed7-4858-8203-040c8aa235d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.523868] env[61545]: DEBUG nova.compute.provider_tree [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.790568] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.820289] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Successfully created port: e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.898751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.934576] env[61545]: INFO nova.compute.manager [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Took 26.28 seconds to build instance. [ 671.999391] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.999840] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquired lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.999840] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.008019] env[61545]: DEBUG nova.compute.manager [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 672.008019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.008019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46413de4-c5a6-43ce-9117-bfc29935963c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.017549] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.017820] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2805d22d-674b-4768-9196-8f40c6a9d90f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.027035] env[61545]: DEBUG nova.scheduler.client.report [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.032078] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 672.032078] env[61545]: value = "task-4255358" [ 672.032078] env[61545]: _type = "Task" [ 672.032078] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.043116] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.080553] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.118866] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.119467] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.119701] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.119929] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.120147] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.120344] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.120817] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.120817] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 672.121434] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.121792] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.122034] env[61545]: DEBUG nova.virt.hardware [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.123364] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d123cfae-550d-45cb-96d1-d030243e21e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.134263] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf94980-d1ad-43ce-b567-0d43478eb4a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.318178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.437074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eacad240-efe8-4577-b004-d6e386663492 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.795s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.537686] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.538428] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.545192] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.474s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.546600] env[61545]: INFO nova.compute.claims [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.549515] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255358, 'name': PowerOffVM_Task, 'duration_secs': 0.210308} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.550333] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 672.550453] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 672.550716] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f647b29d-f3cc-480c-91af-5f5a93ed3671 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.563030] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.623417] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 672.623655] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 672.624138] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Deleting the datastore file [datastore2] 5d9eadff-7f13-4720-8119-5829b4802c21 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 672.624138] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-909b8c7e-4e4a-4fd1-abd0-82ea62390613 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.633682] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for the task: (returnval){ [ 672.633682] env[61545]: value = "task-4255360" [ 672.633682] env[61545]: _type = "Task" [ 672.633682] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.646198] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.913433] env[61545]: DEBUG nova.network.neutron [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Updating instance_info_cache with network_info: [{"id": "4823df3e-d5b5-411f-b835-7dddc654e899", "address": "fa:16:3e:20:5e:b9", "network": {"id": "2437aa91-36dd-4084-a14b-9ed28ba4c29f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-602700299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e59cf3bff474453a38297e54e84ec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4823df3e-d5", "ovs_interfaceid": "4823df3e-d5b5-411f-b835-7dddc654e899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.051392] env[61545]: DEBUG nova.compute.utils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.056702] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 673.149481] env[61545]: DEBUG oslo_vmware.api [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Task: {'id': task-4255360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15946} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.149738] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.150282] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 673.150487] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.151053] env[61545]: INFO nova.compute.manager [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Took 1.14 seconds to destroy the instance on the hypervisor. [ 673.151335] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.151548] env[61545]: DEBUG nova.compute.manager [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 673.151827] env[61545]: DEBUG nova.network.neutron [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.414394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "256e48c1-81de-4d32-97dc-ba80541a9239" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.414780] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.415069] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.415300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.415530] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.418043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Releasing lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.418314] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Instance network_info: |[{"id": "4823df3e-d5b5-411f-b835-7dddc654e899", "address": 
"fa:16:3e:20:5e:b9", "network": {"id": "2437aa91-36dd-4084-a14b-9ed28ba4c29f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-602700299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e59cf3bff474453a38297e54e84ec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4823df3e-d5", "ovs_interfaceid": "4823df3e-d5b5-411f-b835-7dddc654e899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 673.419801] env[61545]: INFO nova.compute.manager [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Terminating instance [ 673.422184] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:5e:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4823df3e-d5b5-411f-b835-7dddc654e899', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.435567] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Creating folder: Project (9e59cf3bff474453a38297e54e84ec89). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.437882] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8888108c-b830-49ab-80d8-fdb0cd8da348 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.453949] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Created folder: Project (9e59cf3bff474453a38297e54e84ec89) in parent group-v838542. [ 673.453949] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Creating folder: Instances. Parent ref: group-v838564. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.453949] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a08b73d-689d-4de1-a933-ff0c2ae829b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.467918] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Created folder: Instances in parent group-v838564. [ 673.467918] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.467918] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 673.470652] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01f09ce1-3a4a-40a8-817c-9eabc0d39ada {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.493872] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.493872] env[61545]: value = "task-4255363" [ 673.493872] env[61545]: _type = "Task" [ 673.493872] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.503750] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255363, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.556433] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.706902] env[61545]: DEBUG nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Received event network-vif-deleted-25b3b54b-4b63-407e-bd50-625dca707982 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 673.707201] env[61545]: DEBUG nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Received event network-vif-plugged-4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 673.707467] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Acquiring lock "4f879b20-bae0-4d50-b5e9-378356341962-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.707993] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Lock "4f879b20-bae0-4d50-b5e9-378356341962-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.707993] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Lock "4f879b20-bae0-4d50-b5e9-378356341962-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.707993] env[61545]: DEBUG nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] No waiting events found dispatching network-vif-plugged-4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 673.710686] env[61545]: WARNING nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Received unexpected event network-vif-plugged-4823df3e-d5b5-411f-b835-7dddc654e899 for instance with vm_state building and task_state spawning. [ 673.710884] env[61545]: DEBUG nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Received event network-changed-4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 673.711120] env[61545]: DEBUG nova.compute.manager [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Refreshing instance network info cache due to event network-changed-4823df3e-d5b5-411f-b835-7dddc654e899. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 673.711418] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Acquiring lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.711666] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Acquired lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.711813] env[61545]: DEBUG nova.network.neutron [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Refreshing network info cache for port 4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.885850] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9be89a9-10ee-4ffc-a59c-c0a0a6726461 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.894238] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c477e6b8-f962-4656-9b95-e642abd9a104 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.929175] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b80b5e3-8c05-4360-b558-594700b76b38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.937672] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10985003-46ab-4b77-bb5c-cd2c87289912 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.942635] env[61545]: DEBUG nova.compute.manager [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 673.942877] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 673.944487] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e1ddab-d681-4108-a8ec-b2e9ff17d305 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.959073] env[61545]: DEBUG nova.compute.provider_tree [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.961427] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 673.961914] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0624119-9af1-478e-b2ec-3546af4d6f24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.970216] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 673.970216] env[61545]: value = "task-4255364" [ 673.970216] env[61545]: _type = "Task" [ 673.970216] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.981222] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255364, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.004604] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255363, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.096953] env[61545]: DEBUG nova.network.neutron [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.464450] env[61545]: DEBUG nova.scheduler.client.report [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.484549] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255364, 'name': PowerOffVM_Task, 'duration_secs': 0.385891} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.485090] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.485501] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.486162] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2ae363b-62ce-497e-8574-d22667c078cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.510065] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255363, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.581489] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 674.583044] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 674.585353] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 674.585353] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Deleting the datastore file [datastore2] 256e48c1-81de-4d32-97dc-ba80541a9239 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.585353] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61f3b86a-bb47-4a39-8a51-97723b881eed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.599073] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for the task: (returnval){ [ 674.599073] env[61545]: value = "task-4255366" [ 674.599073] env[61545]: _type = "Task" [ 674.599073] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.605964] env[61545]: INFO nova.compute.manager [-] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Took 1.45 seconds to deallocate network for instance. [ 674.614554] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.631991] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 674.632387] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.632387] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 674.632572] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.632775] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 674.632864] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 674.635623] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 674.636706] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 674.636706] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c 
tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 674.636706] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 674.636706] env[61545]: DEBUG nova.virt.hardware [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 674.637382] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854cc78d-d4d0-4325-97a5-c90d23b91951 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.651057] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d358fd6-a096-47f0-9468-471cb4f194d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.671762] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.682805] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Creating folder: Project (3014a2483865468784ba042a3cb2eb87). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.688555] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddde5284-6ad0-455a-84f2-ad98c304af4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.704746] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Created folder: Project (3014a2483865468784ba042a3cb2eb87) in parent group-v838542. [ 674.705070] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Creating folder: Instances. Parent ref: group-v838567. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.706063] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f4ca36f-6fc4-4002-8c19-a6a3837ab6e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.719133] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Created folder: Instances in parent group-v838567. 
[ 674.719133] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.719133] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.719133] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9b35fd1-0eee-4abe-8125-6dea70abee5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.739133] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.739133] env[61545]: value = "task-4255369" [ 674.739133] env[61545]: _type = "Task" [ 674.739133] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.748450] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255369, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.971732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.973240] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 674.975963] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.337s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.980595] env[61545]: INFO nova.compute.claims [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.007752] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255363, 'name': CreateVM_Task, 'duration_secs': 1.099423} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.007974] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.009040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.010037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.010037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.010037] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf02b320-cc53-4404-882d-0a5d75d42a9c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.019977] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 675.019977] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52162d3d-39d5-cfc4-1574-b8763d388d0c" [ 675.019977] env[61545]: _type = "Task" [ 675.019977] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.039046] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52162d3d-39d5-cfc4-1574-b8763d388d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.011374} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.039046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.039046] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.039046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.039328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.039328] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.039328] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-085cfff6-ad9c-4d13-a336-da03523739b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.050413] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.050623] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.051382] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72c50fca-64c5-40f3-b3dd-1b441a94c24a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.059721] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 675.059721] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523eb95a-a43d-2baf-c6ac-7af59112a2e1" [ 675.059721] env[61545]: _type = "Task" [ 675.059721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.075320] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523eb95a-a43d-2baf-c6ac-7af59112a2e1, 'name': SearchDatastore_Task, 'duration_secs': 0.011285} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.077143] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6353982b-1668-4ffe-9871-55dac1806161 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.084513] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 675.084513] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a0806c-57c5-bd56-bb8e-a74e6233dcce" [ 675.084513] env[61545]: _type = "Task" [ 675.084513] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.094219] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a0806c-57c5-bd56-bb8e-a74e6233dcce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.107891] env[61545]: DEBUG oslo_vmware.api [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Task: {'id': task-4255366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158197} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.108292] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.108481] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.108659] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.108827] env[61545]: INFO nova.compute.manager [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Took 1.17 seconds to destroy the instance on the hypervisor. [ 675.109077] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.109292] env[61545]: DEBUG nova.compute.manager [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.109455] env[61545]: DEBUG nova.network.neutron [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.117344] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.121585] env[61545]: DEBUG nova.compute.manager [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Received event network-changed-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 675.122182] env[61545]: DEBUG nova.compute.manager [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Refreshing instance network info cache due to event network-changed-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 675.122182] env[61545]: DEBUG oslo_concurrency.lockutils [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.122182] env[61545]: DEBUG oslo_concurrency.lockutils [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.122373] env[61545]: DEBUG nova.network.neutron [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Refreshing network info cache for port a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.258727] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255369, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.425444] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Successfully updated port: e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 675.440030] env[61545]: DEBUG nova.network.neutron [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Updated VIF entry in instance network info cache for port 4823df3e-d5b5-411f-b835-7dddc654e899. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.440030] env[61545]: DEBUG nova.network.neutron [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Updating instance_info_cache with network_info: [{"id": "4823df3e-d5b5-411f-b835-7dddc654e899", "address": "fa:16:3e:20:5e:b9", "network": {"id": "2437aa91-36dd-4084-a14b-9ed28ba4c29f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-602700299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e59cf3bff474453a38297e54e84ec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4823df3e-d5", "ovs_interfaceid": "4823df3e-d5b5-411f-b835-7dddc654e899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.478628] env[61545]: DEBUG nova.compute.utils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 675.481779] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 675.481779] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 675.598073] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a0806c-57c5-bd56-bb8e-a74e6233dcce, 'name': SearchDatastore_Task, 'duration_secs': 0.01065} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.598220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.598625] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4f879b20-bae0-4d50-b5e9-378356341962/4f879b20-bae0-4d50-b5e9-378356341962.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.599056] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8be2b84-974b-4b00-86b9-85db13662691 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.608710] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 675.608710] env[61545]: value = "task-4255370" [ 675.608710] env[61545]: _type = "Task" [ 675.608710] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.620666] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.716586] env[61545]: DEBUG nova.policy [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '549fd3c20927494a8c73b5f96e54bc05', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d8c8b7d5250486a902f8655029c4f97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 675.759148] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255369, 'name': CreateVM_Task, 'duration_secs': 0.626426} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.760326] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.770791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.770791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.770791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.770791] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b43d81-c853-4c77-ba43-e8eba4a99d1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.782417] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 675.782417] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bf13c1-a979-c367-69fe-5cdab7ad9c8c" [ 675.782417] env[61545]: _type = "Task" [ 675.782417] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.797227] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bf13c1-a979-c367-69fe-5cdab7ad9c8c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.929243] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.929537] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.929760] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 675.942598] env[61545]: DEBUG oslo_concurrency.lockutils [req-9f509495-a67e-497f-9b9d-a4681f080604 req-d1a4ae5c-d9ef-4eb3-b7ef-42e60353e138 service nova] Releasing lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.984121] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 676.119428] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255370, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.279661] env[61545]: DEBUG nova.compute.manager [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 676.296617] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bf13c1-a979-c367-69fe-5cdab7ad9c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.082761} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.297024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.297323] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.297585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.297755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.297959] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.299060] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a36a0f1c-6457-43b4-a9e8-f7c30ac1b4bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.319770] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.319912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.320839] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b88cdc7-3cb3-4c21-b18c-604ee1bc93fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.335285] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 676.335285] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9d720-2509-eae3-1fe7-fb81e4c5f261" [ 676.335285] env[61545]: _type = "Task" [ 676.335285] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.345284] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9d720-2509-eae3-1fe7-fb81e4c5f261, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.377523] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4243e98-6055-44ec-a4aa-98ce1adf596f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.387697] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38763e00-73da-4c46-8852-7e6b444744e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.423555] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d0d66b-ed4a-4e74-93c7-01c977f51f37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.437258] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b984967e-985f-4c93-ab52-f80d28ec6b64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.455189] env[61545]: DEBUG nova.compute.provider_tree [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.620820] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539682} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.621331] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4f879b20-bae0-4d50-b5e9-378356341962/4f879b20-bae0-4d50-b5e9-378356341962.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.621574] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.622079] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15ab8fb0-23a3-4455-a23a-8d3d8d69b605 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.629696] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 676.629696] env[61545]: value = "task-4255371" [ 676.629696] env[61545]: _type = "Task" [ 676.629696] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.642515] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.765043] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.809985] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.837113] env[61545]: DEBUG nova.network.neutron [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.860827] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9d720-2509-eae3-1fe7-fb81e4c5f261, 'name': SearchDatastore_Task, 'duration_secs': 0.010833} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.864989] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-600bc8d0-8fa7-4529-bc70-174ab611456f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.873022] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 676.873022] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520c6f10-7adb-a522-4dd0-4ca503c3c275" [ 676.873022] env[61545]: _type = "Task" [ 676.873022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.885022] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520c6f10-7adb-a522-4dd0-4ca503c3c275, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.962200] env[61545]: DEBUG nova.scheduler.client.report [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.995026] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.027939] env[61545]: DEBUG nova.network.neutron [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updated VIF entry in instance network info cache for port a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 677.029045] env[61545]: DEBUG nova.network.neutron [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.042253] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.042253] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.042253] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.042680] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 
tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.042680] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.042680] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.042680] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.042680] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.042851] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.042851] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.043017] env[61545]: DEBUG nova.virt.hardware [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.045554] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324fc91a-b2aa-46f3-ba13-fe04eb07f249 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.059167] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac2ef51-5f69-4342-b629-7ce3c01460ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.145115] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078863} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.145115] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.145533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d14187-a4b6-4f16-a6ee-eebaa0f49b2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.174768] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 4f879b20-bae0-4d50-b5e9-378356341962/4f879b20-bae0-4d50-b5e9-378356341962.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.175991] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0e0cdc0-e36a-4200-b913-2474f369a111 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.192843] env[61545]: DEBUG nova.compute.manager [None req-f3161579-87e3-439e-af35-301f5e269441 tempest-ServerDiagnosticsV248Test-1216446773 tempest-ServerDiagnosticsV248Test-1216446773-project-admin] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.196688] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b6d6f7-ae2d-439e-8a87-7605e33752d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.205552] env[61545]: INFO nova.compute.manager [None req-f3161579-87e3-439e-af35-301f5e269441 tempest-ServerDiagnosticsV248Test-1216446773 tempest-ServerDiagnosticsV248Test-1216446773-project-admin] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Retrieving diagnostics [ 677.208037] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c52f0d-2e25-427c-9f59-2104253ecc07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.212116] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 677.212116] env[61545]: value = "task-4255372" [ 677.212116] env[61545]: _type = "Task" [ 677.212116] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.250974] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255372, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.311875] env[61545]: DEBUG nova.network.neutron [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Updating instance_info_cache with network_info: [{"id": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "address": "fa:16:3e:fb:18:62", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape19a0a22-1c", "ovs_interfaceid": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.342589] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Successfully created port: 400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.345407] env[61545]: INFO nova.compute.manager [-] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Took 2.24 seconds to deallocate network for instance. [ 677.393349] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520c6f10-7adb-a522-4dd0-4ca503c3c275, 'name': SearchDatastore_Task, 'duration_secs': 0.010722} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.393349] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.393349] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 677.393349] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e96a0497-a1aa-461d-bb6e-b7daafd94ebf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.403181] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 677.403181] env[61545]: value = "task-4255373" [ 677.403181] env[61545]: _type = "Task" [ 677.403181] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.412955] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.467610] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.467610] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 677.470235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.756s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.472046] env[61545]: INFO nova.compute.claims [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.539758] env[61545]: DEBUG oslo_concurrency.lockutils [req-ca1397ca-302f-4d62-a046-119841635081 req-c8f79820-5e36-4d0e-8de7-c0c54174dc6a service nova] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.728974] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255372, 'name': ReconfigVM_Task, 'duration_secs': 0.442583} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.729330] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 4f879b20-bae0-4d50-b5e9-378356341962/4f879b20-bae0-4d50-b5e9-378356341962.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.731453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edf32972-20d7-4886-994c-57bbaf7f9c35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.741583] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 677.741583] env[61545]: value = "task-4255374" [ 677.741583] env[61545]: _type = "Task" [ 677.741583] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.763655] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255374, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.818226] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.818329] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Instance network_info: |[{"id": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "address": "fa:16:3e:fb:18:62", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape19a0a22-1c", "ovs_interfaceid": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 677.818841] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:18:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e19a0a22-1c4e-4aa7-94cf-a5e630bb1857', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.832281] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Creating folder: Project (904a34121aff449db58eaa92ccfbe556). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.832281] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c7df543-b263-4d16-950c-216f02589445 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.845712] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Created folder: Project (904a34121aff449db58eaa92ccfbe556) in parent group-v838542. [ 677.846511] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Creating folder: Instances. Parent ref: group-v838570. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.846511] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca752798-db8a-4de1-82ff-74f54d799608 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.856606] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.867109] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Created folder: Instances in parent group-v838570. [ 677.869148] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.869148] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 677.869148] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27daf084-23b9-4d7a-a303-02a71ca6656c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.895126] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.895126] env[61545]: value = "task-4255377" [ 677.895126] env[61545]: _type = "Task" [ 677.895126] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.910671] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255377, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.920083] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255373, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.976365] env[61545]: DEBUG nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Received event network-vif-deleted-bd5363b5-74ac-467c-9834-a90fbaf697db {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 677.977466] env[61545]: DEBUG nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Received event network-vif-plugged-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 677.977961] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Acquiring lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.978296] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.978840] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.980464] env[61545]: DEBUG nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] No waiting events found dispatching network-vif-plugged-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 677.980464] env[61545]: WARNING nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Received unexpected event network-vif-plugged-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 for instance with vm_state building and task_state spawning. 
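The records above show the external-event pattern these logs trace: an incoming network-vif-plugged / network-changed event is serialized under a per-instance "<uuid>-events" lock, handed to a registered waiter if one exists, otherwise logged as unexpected, and a network-changed event triggers a refresh of the cached network info. The following is a minimal, self-contained sketch of that pattern only; it is not Nova's actual implementation, and all names (InstanceEventDispatcher, prepare_for, dispatch, refresh_cache_cb) are hypothetical.

    import threading
    from collections import defaultdict

    class InstanceEventDispatcher:
        """Illustrative stand-in for per-instance external event handling."""

        def __init__(self):
            self._lock = threading.Lock()          # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)      # instance_uuid -> {event_name: threading.Event}

        def prepare_for(self, instance_uuid, event_name):
            """Register interest in an event before starting the action that causes it."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name, refresh_cache_cb=None):
            """Deliver an incoming external event (e.g. reported by the network service)."""
            with self._lock:
                waiter = self._waiters[instance_uuid].pop(event_name, None)
            if waiter is not None:
                waiter.set()                       # a waiter exists: wake it up
            else:
                # no waiting events found -> mirrors the WARNING seen in the log
                print(f"unexpected event {event_name} for instance {instance_uuid}")
                if event_name.startswith("network-changed") and refresh_cache_cb:
                    refresh_cache_cb(instance_uuid)  # refresh the instance network info cache

    # usage sketch
    dispatcher = InstanceEventDispatcher()
    w = dispatcher.prepare_for("8a3ac91d", "network-vif-plugged-e19a0a22")
    dispatcher.dispatch("8a3ac91d", "network-vif-plugged-e19a0a22")
    assert w.wait(timeout=1)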
[ 677.980464] env[61545]: DEBUG nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Received event network-changed-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 677.980464] env[61545]: DEBUG nova.compute.manager [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Refreshing instance network info cache due to event network-changed-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 677.980464] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Acquiring lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.981756] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Acquired lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.981756] env[61545]: DEBUG nova.network.neutron [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Refreshing network info cache for port e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.986256] env[61545]: DEBUG nova.compute.utils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 677.992622] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.992785] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.156616] env[61545]: DEBUG nova.policy [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bdf488689af462dbfb665f91243105e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d941553019a47658b8a9366f722a571', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.264609] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255374, 'name': Rename_Task, 'duration_secs': 0.387631} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.264883] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.265039] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e5f7ddf-d1df-42cf-a150-0d7475b560b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.274037] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 678.274037] env[61545]: value = "task-4255378" [ 678.274037] env[61545]: _type = "Task" [ 678.274037] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.291044] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255378, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.415922] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255377, 'name': CreateVM_Task, 'duration_secs': 0.453241} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.420200] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 678.421104] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.421574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.421838] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 678.422452] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0e9c4c5-5a56-48e9-ae52-55e301029709 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.428345] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686619} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.429921] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 678.430633] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 678.430633] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 678.430633] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5207fea5-a900-c7ad-9c98-e2f0fc4b61cf" [ 678.430633] env[61545]: _type = "Task" [ 678.430633] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.430919] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d85245e-fb17-4f28-8c29-fd62d59b6622 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.444293] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5207fea5-a900-c7ad-9c98-e2f0fc4b61cf, 'name': SearchDatastore_Task, 'duration_secs': 0.010648} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.446202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.447068] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 678.447068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.447068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.447325] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 678.447734] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 678.447734] env[61545]: value = "task-4255379" [ 678.447734] env[61545]: _type = "Task" [ 678.447734] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.447936] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-223a7e91-68d7-4bc7-9c02-2e86069421f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.460793] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255379, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.463177] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 678.466660] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 678.466660] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abd98396-4d7a-4133-9b9a-a51f7c77bfbe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.471762] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 678.471762] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522757b8-97ea-0e1c-30d0-a1e338d945cf" [ 678.471762] env[61545]: _type = "Task" [ 678.471762] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.487602] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522757b8-97ea-0e1c-30d0-a1e338d945cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.497152] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 678.630377] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.630621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.786903] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255378, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.873644] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb78654-00ce-4e46-b843-7264e58100b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.885770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0defa-04de-4070-a603-0d5e89475761 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.923841] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f16aa6a-67b9-4428-a2a9-ce74d2f13719 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.934271] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7432b028-b166-4213-9c56-f2785c65d866 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.952175] env[61545]: DEBUG nova.compute.provider_tree [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.964064] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076692} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.964417] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.965289] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cb4220-f93e-4302-ab40-203f76c634ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.992443] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 679.001739] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb588d87-a65d-4ffd-b1db-2d7e20b0f4d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.034127] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522757b8-97ea-0e1c-30d0-a1e338d945cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011572} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.034127] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 679.034127] env[61545]: value = "task-4255380" [ 679.034127] env[61545]: _type = "Task" [ 679.034127] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.034127] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b35c311-79e1-41c6-bf22-a5c7a2c322cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.048249] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.049536] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 679.049536] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd5ea4-c041-ad31-a514-c5da3933fd25" [ 679.049536] env[61545]: _type = "Task" [ 679.049536] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.060371] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd5ea4-c041-ad31-a514-c5da3933fd25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.141801] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.288198] env[61545]: DEBUG oslo_vmware.api [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255378, 'name': PowerOnVM_Task, 'duration_secs': 0.716578} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.288649] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.288649] env[61545]: INFO nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Took 9.58 seconds to spawn the instance on the hypervisor. 
[ 679.288766] env[61545]: DEBUG nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.289703] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e1eb93-23ba-47b4-ac40-54a1b6dc79f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.458281] env[61545]: DEBUG nova.scheduler.client.report [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.509152] env[61545]: DEBUG nova.network.neutron [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Updated VIF entry in instance network info cache for port e19a0a22-1c4e-4aa7-94cf-a5e630bb1857. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 679.509770] env[61545]: DEBUG nova.network.neutron [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Updating instance_info_cache with network_info: [{"id": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "address": "fa:16:3e:fb:18:62", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape19a0a22-1c", "ovs_interfaceid": "e19a0a22-1c4e-4aa7-94cf-a5e630bb1857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.524436] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 679.554599] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.564798] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Successfully created port: 2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.573428] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd5ea4-c041-ad31-a514-c5da3933fd25, 'name': SearchDatastore_Task, 'duration_secs': 0.030086} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.574326] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.574684] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8a3ac91d-8949-4745-9161-1a70899c0293/8a3ac91d-8949-4745-9161-1a70899c0293.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 679.577093] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 679.577335] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 
tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.577510] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 679.577665] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.577844] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 679.577959] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 679.578245] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 679.578599] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 679.578796] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 679.579067] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 679.579459] env[61545]: DEBUG nova.virt.hardware [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 679.580377] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04a587d6-4780-4990-88ef-bdf425dd9caf {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.584560] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4f3edb-cabc-4678-8090-9a14c40762f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.594556] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517666b9-31db-4d84-b489-d2c84c9bc32c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.599210] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 679.599210] env[61545]: value = "task-4255381" [ 679.599210] env[61545]: _type = "Task" [ 679.599210] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.618698] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.682423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.814320] env[61545]: INFO nova.compute.manager [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Took 23.29 seconds to build instance. 
[ 679.859835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "579fb20b-083f-4227-9a13-c0f1ea36e272" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.860102] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.860311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "579fb20b-083f-4227-9a13-c0f1ea36e272-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.860542] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.860717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.862844] env[61545]: INFO nova.compute.manager [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Terminating instance [ 679.962906] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.963537] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 679.970552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.128s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.973726] env[61545]: INFO nova.compute.claims [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.013359] env[61545]: DEBUG oslo_concurrency.lockutils [req-c99a6698-fafc-473d-a499-883d22d6f47a req-97b04506-3df5-4030-8e34-26ec53940b11 service nova] Releasing lock "refresh_cache-8a3ac91d-8949-4745-9161-1a70899c0293" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.052963] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255380, 'name': ReconfigVM_Task, 'duration_secs': 0.596347} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.053414] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 680.054440] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1698daba-cf16-4687-a6eb-1557f2328e54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.064838] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 680.064838] env[61545]: value = "task-4255382" [ 680.064838] env[61545]: _type = "Task" [ 680.064838] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.077374] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255382, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.115099] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255381, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.319705] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9468e292-c285-4306-96f5-d8ad2febb656 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.800s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.370653] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "refresh_cache-579fb20b-083f-4227-9a13-c0f1ea36e272" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.370975] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquired lock "refresh_cache-579fb20b-083f-4227-9a13-c0f1ea36e272" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.371385] env[61545]: DEBUG nova.network.neutron [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.434801] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Successfully updated port: 400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.472724] env[61545]: DEBUG nova.compute.utils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 680.474125] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 680.474295] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 680.580371] env[61545]: DEBUG nova.policy [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '820afec002514775bd94a71b4d4547a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e060fc87d3ea4aa9bb25853eeeca3c23', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.589317] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255382, 'name': Rename_Task, 'duration_secs': 0.209253} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.589698] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 680.592038] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec5d9473-de4f-45ee-a8a0-803d7503ab79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.601158] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 680.601158] env[61545]: value = "task-4255383" [ 680.601158] env[61545]: _type = "Task" [ 680.601158] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.614969] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.618794] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600161} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.619326] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8a3ac91d-8949-4745-9161-1a70899c0293/8a3ac91d-8949-4745-9161-1a70899c0293.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 680.619770] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 680.620164] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70d8bd9a-85c2-45c2-b114-9b32e6c39996 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.630332] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 680.630332] env[61545]: value = "task-4255384" [ 680.630332] env[61545]: _type = "Task" [ 680.630332] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.643122] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255384, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.911214] env[61545]: DEBUG nova.network.neutron [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.938671] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.938671] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquired lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.938671] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.980143] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 681.013533] env[61545]: DEBUG nova.network.neutron [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.080337] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Successfully created port: b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.113917] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255383, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.144751] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255384, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08098} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.151799] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 681.153109] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad035eb-16da-44f4-9383-4469684d2306 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.179927] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 8a3ac91d-8949-4745-9161-1a70899c0293/8a3ac91d-8949-4745-9161-1a70899c0293.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 681.184033] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26c94abc-a3a7-455e-b587-32e09d9c14de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.208512] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 681.208512] env[61545]: value = "task-4255385" [ 681.208512] env[61545]: _type = "Task" [ 681.208512] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.217893] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255385, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.428081] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ecb883-863c-448b-9fb8-13d0094ac3a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.439101] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723d12cb-dd2a-4f07-a955-6c77af3a5265 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.478776] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4503fc43-41f0-4156-99a1-13e38a651524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.493565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc2d88e-ebec-4ef8-a638-30d1e859862c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.503414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Acquiring lock "1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.503479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Lock "1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.517497] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Releasing lock "refresh_cache-579fb20b-083f-4227-9a13-c0f1ea36e272" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.517847] env[61545]: DEBUG nova.compute.manager [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 681.518062] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 681.518726] env[61545]: DEBUG nova.compute.provider_tree [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.522024] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540ef260-eea8-49c6-ab15-27ae6927fc9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.532548] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 681.532852] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58c422d9-de1d-4697-b950-1ca61d4f5fa9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.542348] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 681.542348] env[61545]: value = "task-4255386" [ 681.542348] env[61545]: _type = "Task" [ 681.542348] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.553802] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.615514] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255383, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.717841] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255385, 'name': ReconfigVM_Task, 'duration_secs': 0.317828} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.718051] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 8a3ac91d-8949-4745-9161-1a70899c0293/8a3ac91d-8949-4745-9161-1a70899c0293.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 681.718960] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe688ea8-cbe6-47fd-8a70-202c3f8156cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.726425] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 681.726425] env[61545]: value = "task-4255387" [ 681.726425] env[61545]: _type = "Task" [ 681.726425] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.736413] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255387, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.757204] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.765839] env[61545]: DEBUG nova.compute.manager [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Received event network-vif-deleted-72abf1ec-6ac4-4654-b096-bdfb06c58f03 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 681.765961] env[61545]: DEBUG nova.compute.manager [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Received event network-vif-plugged-400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 681.766140] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.766399] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.766592] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.766760] env[61545]: DEBUG nova.compute.manager [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] No waiting events found dispatching network-vif-plugged-400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.766920] env[61545]: WARNING nova.compute.manager [req-bf061035-3857-4631-947e-a254f78f424a req-3abf00a0-4953-4803-8f9b-050e4fd07e66 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Received unexpected event network-vif-plugged-400a0ea3-0087-4d35-bc44-2849c40231e6 for instance with vm_state building and task_state spawning. [ 681.996041] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.009428] env[61545]: DEBUG nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.023551] env[61545]: DEBUG nova.scheduler.client.report [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.055568] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255386, 'name': PowerOffVM_Task, 'duration_secs': 0.137341} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.059198] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.059454] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.059604] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.059779] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.060253] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.060253] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.060619] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.060619] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.060619] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.060753] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.060893] env[61545]: DEBUG nova.virt.hardware [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.061389] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.061389] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 682.062640] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf25bfc8-66b5-4f1e-98e0-cfbbf1838dc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.065763] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf67ddd1-0a2a-4190-908a-49b9c334a2eb {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.073767] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2a6aa-76d5-4343-948c-9e6ae9582155 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.093923] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 682.094171] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 682.094355] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Deleting the datastore file [datastore2] 579fb20b-083f-4227-9a13-c0f1ea36e272 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.094624] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93c0a74e-d73e-4588-b821-db9fcddd0155 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.103762] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for the task: (returnval){ [ 682.103762] env[61545]: value = "task-4255389" [ 682.103762] env[61545]: _type = "Task" [ 682.103762] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.118954] env[61545]: DEBUG oslo_vmware.api [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255383, 'name': PowerOnVM_Task, 'duration_secs': 1.111703} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.119291] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255389, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.119558] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 682.121043] env[61545]: INFO nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Took 7.54 seconds to spawn the instance on the hypervisor. [ 682.121043] env[61545]: DEBUG nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 682.121043] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48581f9-6b77-4e56-a870-212964dfd245 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.241648] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255387, 'name': Rename_Task, 'duration_secs': 0.153035} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.241865] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 682.242147] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d750f70-e014-4f6a-9a3f-1e99fa218efd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.252639] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 682.252639] env[61545]: value = "task-4255390" [ 682.252639] env[61545]: _type = "Task" [ 682.252639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.260816] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255390, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.302024] env[61545]: DEBUG nova.network.neutron [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updating instance_info_cache with network_info: [{"id": "400a0ea3-0087-4d35-bc44-2849c40231e6", "address": "fa:16:3e:24:ec:a6", "network": {"id": "5c67fa2d-8198-40b5-a811-d302e1d825c1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-800705450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d8c8b7d5250486a902f8655029c4f97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap400a0ea3-00", "ovs_interfaceid": "400a0ea3-0087-4d35-bc44-2849c40231e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.533528] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.534161] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.537691] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.066s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.543752] env[61545]: INFO nova.compute.claims [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.547700] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.612813] env[61545]: DEBUG oslo_vmware.api [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Task: {'id': task-4255389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114915} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.613146] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 682.613338] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 682.613517] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 682.613705] env[61545]: INFO nova.compute.manager [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Took 1.10 seconds to destroy the instance on the hypervisor. [ 682.613962] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.614584] env[61545]: DEBUG nova.compute.manager [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 682.614668] env[61545]: DEBUG nova.network.neutron [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.645956] env[61545]: INFO nova.compute.manager [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Took 25.75 seconds to build instance. [ 682.668183] env[61545]: DEBUG nova.network.neutron [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.763147] env[61545]: DEBUG oslo_vmware.api [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255390, 'name': PowerOnVM_Task, 'duration_secs': 0.501031} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.764357] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Successfully updated port: 2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.767948] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 682.768332] env[61545]: INFO nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Took 10.69 seconds to spawn the instance on the hypervisor. 
[ 682.768540] env[61545]: DEBUG nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 682.770259] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae164754-69cc-484a-8b8c-e21ae10fd729 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.805095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Releasing lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.805480] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Instance network_info: |[{"id": "400a0ea3-0087-4d35-bc44-2849c40231e6", "address": "fa:16:3e:24:ec:a6", "network": {"id": "5c67fa2d-8198-40b5-a811-d302e1d825c1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-800705450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d8c8b7d5250486a902f8655029c4f97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap400a0ea3-00", "ovs_interfaceid": "400a0ea3-0087-4d35-bc44-2849c40231e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.806642] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:ec:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '400a0ea3-0087-4d35-bc44-2849c40231e6', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.819305] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Creating folder: Project (0d8c8b7d5250486a902f8655029c4f97). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.819305] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33c87363-7d1b-4592-9509-17ff8fa289d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.831387] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Created folder: Project (0d8c8b7d5250486a902f8655029c4f97) in parent group-v838542. [ 682.831583] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Creating folder: Instances. Parent ref: group-v838573. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.831900] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1afa92b9-5ece-4a67-9bec-7845cdd149cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.843757] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Created folder: Instances in parent group-v838573. [ 682.844028] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.844226] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.844432] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2240dec2-7f0f-4bc9-a746-2092b4f4d4a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.867944] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.867944] env[61545]: value = "task-4255393" [ 682.867944] env[61545]: _type = "Task" [ 682.867944] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.879950] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255393, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.050332] env[61545]: DEBUG nova.compute.utils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.052359] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 683.055962] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 683.148047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be39c0db-7466-4ff5-8d40-9ffc0354f20c tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.260s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.153580] env[61545]: DEBUG nova.policy [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa7d4bbc40b146f2b159d450ef9b805d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82cfc8f4bc7d4a6da10e234b077f761f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.171036] env[61545]: DEBUG nova.network.neutron [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.269917] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.270147] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquired lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.270338] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.296432] env[61545]: INFO nova.compute.manager [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Took 26.65 seconds to build instance. [ 683.384063] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255393, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.563825] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.678511] env[61545]: INFO nova.compute.manager [-] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Took 1.06 seconds to deallocate network for instance. [ 683.798310] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c06c4048-26ea-477d-9ff3-19bdc18c9600 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.168s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.885162] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255393, 'name': CreateVM_Task, 'duration_secs': 0.639548} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.890252] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.890252] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.890539] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.890600] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.890860] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7954bf89-3b36-4aba-ae0b-31c65784c2a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.898479] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 683.898479] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52371925-74c8-1bf2-9359-e64637b8a0ab" [ 683.898479] env[61545]: _type = "Task" [ 683.898479] 
env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.905232] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.916207] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52371925-74c8-1bf2-9359-e64637b8a0ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.945677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff229c9e-483d-4fcf-9fca-bc0f673887b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.955133] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9805d2-8397-42de-abe2-10756e556e64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.998623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbebed0-7d6d-4851-a1ae-1fdc27e5e026 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.008839] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38882567-e57f-4aff-bc17-0af4a4b107aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.026858] env[61545]: DEBUG nova.compute.provider_tree [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.183636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "4f879b20-bae0-4d50-b5e9-378356341962" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.184161] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.184536] env[61545]: INFO nova.compute.manager [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 
4f879b20-bae0-4d50-b5e9-378356341962] Rebooting instance [ 684.192384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.420665] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52371925-74c8-1bf2-9359-e64637b8a0ab, 'name': SearchDatastore_Task, 'duration_secs': 0.031049} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.421388] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.421633] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.421991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.422239] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.422511] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.423815] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-404e2e66-5bde-4a45-9057-3cca2cb53e9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.437272] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.437874] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.438891] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7779f94c-4e85-48fa-93d3-1084b4248f49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.452625] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 684.452625] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cfca7d-2215-6737-1936-595324e6d11d" [ 684.452625] env[61545]: _type = "Task" [ 684.452625] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.475774] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cfca7d-2215-6737-1936-595324e6d11d, 'name': SearchDatastore_Task, 'duration_secs': 0.015178} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.478289] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98409fd6-85f0-4fc3-a4a8-f68e661661db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.489497] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 684.489497] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52edbc19-d80e-48db-7af2-23affa06be37" [ 684.489497] env[61545]: _type = "Task" [ 684.489497] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.509293] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52edbc19-d80e-48db-7af2-23affa06be37, 'name': SearchDatastore_Task, 'duration_secs': 0.014959} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.509752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.510658] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 0554c462-1dc5-4043-94ac-7a3d28ed05e1/0554c462-1dc5-4043-94ac-7a3d28ed05e1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.510658] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab852089-d09a-4561-b171-f0fafaa24658 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.523502] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 684.523502] env[61545]: value = "task-4255394" [ 684.523502] env[61545]: _type = "Task" [ 684.523502] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.532298] env[61545]: DEBUG nova.scheduler.client.report [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.547549] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.581363] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.638273] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.638273] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.638511] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.638860] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.639093] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.639385] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.639496] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.639654] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.639822] 
env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.639985] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.640176] env[61545]: DEBUG nova.virt.hardware [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.641599] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78b38ae-e37f-4d9d-80cd-313365d8a0b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.656441] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3411c72b-6930-400b-a5d2-49b9ecfbefdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.747302] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.747302] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquired lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.747302] env[61545]: DEBUG nova.network.neutron [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.009594] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Successfully updated port: b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 685.016124] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Successfully created port: 4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.043850] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.044669] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.049445] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255394, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.049445] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.334s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.049687] env[61545]: DEBUG nova.objects.instance [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lazy-loading 'resources' on Instance uuid 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 685.054102] env[61545]: DEBUG nova.network.neutron [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Updating instance_info_cache with network_info: [{"id": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "address": "fa:16:3e:59:b2:d3", "network": {"id": "59241c11-b8ca-4e60-9011-e383c1a17d97", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1291782309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d941553019a47658b8a9366f722a571", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c39c356-8b", "ovs_interfaceid": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.310569] 
env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Received event network-changed-400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 685.311434] env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Refreshing instance network info cache due to event network-changed-400a0ea3-0087-4d35-bc44-2849c40231e6. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 685.311434] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Acquiring lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.311434] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Acquired lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.311434] env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Refreshing network info cache for port 400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.522627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.522627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.522627] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.543506] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790814} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.544071] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 0554c462-1dc5-4043-94ac-7a3d28ed05e1/0554c462-1dc5-4043-94ac-7a3d28ed05e1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.547024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.547024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c17bfc2-460f-4823-89d0-8dc99677e833 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.551218] env[61545]: DEBUG nova.compute.utils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 685.554043] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 685.555048] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.567168] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Releasing lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.567168] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Instance network_info: |[{"id": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "address": "fa:16:3e:59:b2:d3", "network": {"id": "59241c11-b8ca-4e60-9011-e383c1a17d97", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1291782309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d941553019a47658b8a9366f722a571", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c39c356-8b", "ovs_interfaceid": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.567403] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 685.567403] env[61545]: value = "task-4255395" [ 685.567403] env[61545]: _type = "Task" [ 685.567403] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.567403] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:b2:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c39c356-8b63-44c6-9bcf-46f4765b0c4b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.578371] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Creating folder: Project (1d941553019a47658b8a9366f722a571). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.580604] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46460423-69da-4e65-815e-a595e442fd00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.595584] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255395, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.601026] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Created folder: Project (1d941553019a47658b8a9366f722a571) in parent group-v838542. [ 685.601026] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Creating folder: Instances. Parent ref: group-v838576. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.601026] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31900f55-d8ed-467f-b7ff-6f431ad41096 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.614697] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Created folder: Instances in parent group-v838576. [ 685.615604] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.616128] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.619024] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc758beb-298b-40b0-b711-3a95639a7052 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.643235] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.643235] env[61545]: value = "task-4255398" [ 685.643235] env[61545]: _type = "Task" [ 685.643235] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.657246] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255398, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.876183] env[61545]: DEBUG nova.policy [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4cac6ff129749928a47576d5247364d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d4e84d3c1e741aa985cf8f6527112b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 685.995494] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a560e275-cc6c-46e3-83ce-c0fa62f634ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.003671] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18d1fdc-681c-4608-a460-4290e2ebffb9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.042256] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ddfc7b-ecae-4e43-bc22-3dfe0f9db407 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.052670] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7be3b6-92ad-4e4f-ab43-f6b4698c8834 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.068626] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.072383] env[61545]: DEBUG nova.compute.provider_tree [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.090674] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148314} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.090969] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.092770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe768163-3381-4b36-8bb7-2d63850ee49f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.132804] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 0554c462-1dc5-4043-94ac-7a3d28ed05e1/0554c462-1dc5-4043-94ac-7a3d28ed05e1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.136684] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-346e485e-c41a-4541-b8a9-7654c3f23531 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.167777] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255398, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.169418] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 686.169418] env[61545]: value = "task-4255399" [ 686.169418] env[61545]: _type = "Task" [ 686.169418] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.179711] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255399, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.247794] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.587411] env[61545]: DEBUG nova.scheduler.client.report [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.668626] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255398, 'name': CreateVM_Task, 'duration_secs': 0.933219} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.668968] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.669691] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.670808] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.671066] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.671458] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab1fef43-4e97-4791-b4a5-3954b89d6452 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.687156] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255399, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.687156] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 686.687156] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5225dea7-aeba-460b-a70d-82e2dfeed225" [ 686.687156] env[61545]: _type = "Task" [ 686.687156] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.699371] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5225dea7-aeba-460b-a70d-82e2dfeed225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.705354] env[61545]: INFO nova.compute.manager [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Rebuilding instance [ 686.757104] env[61545]: DEBUG nova.compute.manager [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 686.758107] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea481b5-5336-4e99-8b8a-9be2d6cb42d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.929766] env[61545]: DEBUG nova.network.neutron [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Updating instance_info_cache with network_info: [{"id": "4823df3e-d5b5-411f-b835-7dddc654e899", "address": "fa:16:3e:20:5e:b9", "network": {"id": "2437aa91-36dd-4084-a14b-9ed28ba4c29f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-602700299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e59cf3bff474453a38297e54e84ec89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4823df3e-d5", "ovs_interfaceid": "4823df3e-d5b5-411f-b835-7dddc654e899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.038779] 
env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updated VIF entry in instance network info cache for port 400a0ea3-0087-4d35-bc44-2849c40231e6. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 687.038871] env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updating instance_info_cache with network_info: [{"id": "400a0ea3-0087-4d35-bc44-2849c40231e6", "address": "fa:16:3e:24:ec:a6", "network": {"id": "5c67fa2d-8198-40b5-a811-d302e1d825c1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-800705450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d8c8b7d5250486a902f8655029c4f97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap400a0ea3-00", "ovs_interfaceid": "400a0ea3-0087-4d35-bc44-2849c40231e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.089885] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.097732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.103785] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.205s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.103785] env[61545]: DEBUG nova.objects.instance [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lazy-loading 'resources' on Instance uuid 1a551e66-1b98-44fd-ad16-c20113d9b1a6 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.131497] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.131655] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.134244] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.134244] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.134244] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.134244] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.134244] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.134434] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.134434] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.134434] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.134434] env[61545]: DEBUG nova.virt.hardware [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.134580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67c7bfe-7da5-4e54-8430-40fbb38dbdcc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.139917] env[61545]: DEBUG nova.network.neutron [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.146908] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff2c7ce-d058-4398-8dc9-33297db1d946 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.166440] env[61545]: INFO nova.scheduler.client.report [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Deleted allocations for instance 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc [ 687.188061] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255399, 'name': ReconfigVM_Task, 'duration_secs': 0.734846} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.191907] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 0554c462-1dc5-4043-94ac-7a3d28ed05e1/0554c462-1dc5-4043-94ac-7a3d28ed05e1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.193231] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7163ce6a-b7da-41b5-9168-a259bf2a8ac1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.208020] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5225dea7-aeba-460b-a70d-82e2dfeed225, 'name': SearchDatastore_Task, 'duration_secs': 0.015711} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.209394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.209627] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.209849] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.209989] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.210172] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.210859] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 687.210859] env[61545]: value = "task-4255400" [ 687.210859] env[61545]: _type = "Task" [ 687.210859] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.211161] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71c5588f-4ffb-4b26-9a8b-77d0f075e506 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.230887] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255400, 'name': Rename_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.232359] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.232468] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 687.233506] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c62a98e-9321-4b0e-ad73-e034970b068e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.240132] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 687.240132] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a57897-704e-283d-a676-3abe76738089" [ 687.240132] env[61545]: _type = "Task" [ 687.240132] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.250471] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a57897-704e-283d-a676-3abe76738089, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.434322] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Releasing lock "refresh_cache-4f879b20-bae0-4d50-b5e9-378356341962" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.542760] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Releasing lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.542760] env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Received event network-vif-plugged-2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 687.542760] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Acquiring lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.542760] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.542760] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.543264] env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] No waiting events found dispatching network-vif-plugged-2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.546611] env[61545]: WARNING nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Received unexpected event network-vif-plugged-2c39c356-8b63-44c6-9bcf-46f4765b0c4b for instance with vm_state building and task_state spawning. 
[ 687.546611] env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Received event network-changed-2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 687.546611] env[61545]: DEBUG nova.compute.manager [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Refreshing instance network info cache due to event network-changed-2c39c356-8b63-44c6-9bcf-46f4765b0c4b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 687.546611] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Acquiring lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.546611] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Acquired lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.546919] env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Refreshing network info cache for port 2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.641327] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.641686] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Instance network_info: |[{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.642158] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:12:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.650543] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Creating folder: Project (e060fc87d3ea4aa9bb25853eeeca3c23). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.654296] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72988e20-4d3c-4669-a17b-dd6f132aaab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.669185] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Created folder: Project (e060fc87d3ea4aa9bb25853eeeca3c23) in parent group-v838542. [ 687.669435] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Creating folder: Instances. Parent ref: group-v838579. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.673103] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4017513c-b95c-49d3-b2c1-448f236220b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.680952] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ce3d1c1-33e7-4bd9-a304-5ad003a0e32a tempest-DeleteServersAdminTestJSON-446284206 tempest-DeleteServersAdminTestJSON-446284206-project-admin] Lock "2a43ac48-cdea-48c8-b3d2-e939c69ce2dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.880s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.694676] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Created folder: Instances in parent group-v838579. [ 687.694792] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.694902] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.695229] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de50525a-31c9-4a78-a607-54b180000286 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.726242] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.726242] env[61545]: value = "task-4255403" [ 687.726242] env[61545]: _type = "Task" [ 687.726242] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.733579] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255400, 'name': Rename_Task, 'duration_secs': 0.158871} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.735139] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.737827] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78cc500e-9101-4323-b395-4ce668c77ee9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.744111] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255403, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.750729] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 687.750729] env[61545]: value = "task-4255404" [ 687.750729] env[61545]: _type = "Task" [ 687.750729] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.758012] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a57897-704e-283d-a676-3abe76738089, 'name': SearchDatastore_Task, 'duration_secs': 0.013429} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.761978] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a114cbe6-6855-4d9f-838f-b8840d185a93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.769576] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255404, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.774226] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 687.774335] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 687.774335] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529773b3-7ea6-0213-813d-0bd7cbd9e210" [ 687.774335] env[61545]: _type = "Task" [ 687.774335] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.774574] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0da1f8d1-da97-466b-a39e-0111a7e25a00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.787118] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529773b3-7ea6-0213-813d-0bd7cbd9e210, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.788947] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 687.788947] env[61545]: value = "task-4255405" [ 687.788947] env[61545]: _type = "Task" [ 687.788947] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.802279] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.908477] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Successfully created port: f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.941843] env[61545]: DEBUG nova.compute.manager [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.945370] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a5cab4-f3e1-4669-a04e-cb461e8ee6a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.028770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32b5ed8-ca47-47c1-bf6b-faf4f42fc75b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.036555] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de22cad7-f3e6-4266-b617-dc040363a730 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.078910] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754bd301-afe6-44c7-87ba-f4095dd691bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.086071] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2548b4-67de-450d-8935-0239e25059ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.106378] env[61545]: DEBUG nova.compute.provider_tree [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.240884] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255403, 'name': CreateVM_Task, 'duration_secs': 0.370004} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.242301] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.242301] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.242868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.242868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.243266] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebe9f6d7-1b71-4c38-94b5-6ac1b69226bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.249961] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 688.249961] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274b468-3cac-4f61-8709-ca5fc7cd9a91" [ 688.249961] env[61545]: _type = "Task" [ 688.249961] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.262910] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274b468-3cac-4f61-8709-ca5fc7cd9a91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.267409] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255404, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.298776] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529773b3-7ea6-0213-813d-0bd7cbd9e210, 'name': SearchDatastore_Task, 'duration_secs': 0.017669} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.303760] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.304594] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3/1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 688.304907] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9adddb15-2c92-4788-95e7-7c07ca701021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.315908] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255405, 'name': PowerOffVM_Task, 'duration_secs': 0.16573} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.318174] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.318890] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.319300] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 688.319300] env[61545]: value = "task-4255406" [ 688.319300] env[61545]: _type = "Task" [ 688.319300] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.320298] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f66ed7d-9210-40b8-8c56-6f4cd1c5ac20 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.344406] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.345295] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.345708] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d5192a9-e7ba-4157-a559-902b391b6ea0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.377138] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.377466] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.377637] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Deleting the datastore file [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.377923] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5fde9db-6464-4b49-8280-13fd44fce459 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.386033] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 688.386033] env[61545]: value = "task-4255409" [ 688.386033] env[61545]: _type = "Task" [ 688.386033] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.395796] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.612592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.612792] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.643781] env[61545]: ERROR nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] [req-ab8e61ed-7eb0-4fa5-b5e9-e97e6e0bf62b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ab8e61ed-7eb0-4fa5-b5e9-e97e6e0bf62b"}]} [ 688.675387] env[61545]: DEBUG nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 688.696895] env[61545]: DEBUG nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 688.696895] env[61545]: DEBUG nova.compute.provider_tree [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.715624] env[61545]: DEBUG nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 688.745955] env[61545]: DEBUG nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 688.767819] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274b468-3cac-4f61-8709-ca5fc7cd9a91, 'name': SearchDatastore_Task, 'duration_secs': 0.028516} completed 
successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.772437] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.772933] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.773470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.773839] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.774207] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.774607] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255404, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.778435] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5342970-affd-4a41-9365-bdfc2ade6e8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.792950] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.793578] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.793994] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ead5e77-f4b9-48e1-bf76-403aa2d3442d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.813908] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 688.813908] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5265a757-457d-f819-c26d-5439fd9cad85" [ 688.813908] env[61545]: _type = "Task" [ 688.813908] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.839852] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5265a757-457d-f819-c26d-5439fd9cad85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.855291] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255406, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.884570] env[61545]: DEBUG nova.compute.manager [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-vif-plugged-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 688.884808] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Acquiring lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.885947] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.885947] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.885947] env[61545]: DEBUG nova.compute.manager [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] No waiting events found dispatching 
network-vif-plugged-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.885947] env[61545]: WARNING nova.compute.manager [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received unexpected event network-vif-plugged-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e for instance with vm_state building and task_state spawning. [ 688.885947] env[61545]: DEBUG nova.compute.manager [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 688.886333] env[61545]: DEBUG nova.compute.manager [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing instance network info cache due to event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 688.886580] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.886718] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.886873] env[61545]: DEBUG nova.network.neutron [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.914392] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142488} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.917517] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.917742] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.917915] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.978158] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0be7de-f700-4637-9834-41a9e1beaf41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.996072] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Doing hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 688.996072] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-affbc3c1-aae0-4e78-8420-6203a133c50b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.003768] env[61545]: DEBUG oslo_vmware.api [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 689.003768] env[61545]: value = "task-4255413" [ 689.003768] env[61545]: _type = "Task" [ 689.003768] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.013425] env[61545]: DEBUG oslo_vmware.api [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255413, 'name': ResetVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.111842] env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Updated VIF entry in instance network info cache for port 2c39c356-8b63-44c6-9bcf-46f4765b0c4b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.112253] env[61545]: DEBUG nova.network.neutron [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Updating instance_info_cache with network_info: [{"id": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "address": "fa:16:3e:59:b2:d3", "network": {"id": "59241c11-b8ca-4e60-9011-e383c1a17d97", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1291782309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d941553019a47658b8a9366f722a571", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c39c356-8b", "ovs_interfaceid": "2c39c356-8b63-44c6-9bcf-46f4765b0c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.122171] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.145365] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5d2029-539c-47e9-93aa-7b8f6ce7d01d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.160802] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0fb74a-7ebe-4ba2-bf06-5d27441a0ec4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.210854] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db774d4-3f02-4fd9-aa4e-4622cb068e50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.221981] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a553836e-5987-4cf8-a3f5-42d62810739a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.241265] env[61545]: DEBUG nova.compute.provider_tree [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.270317] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255404, 'name': PowerOnVM_Task} progress is 73%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.324131] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5265a757-457d-f819-c26d-5439fd9cad85, 'name': SearchDatastore_Task, 'duration_secs': 0.062928} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.325943] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-772e0b1c-1070-4729-8cb2-dedfe2b9d523 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.338379] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 689.338379] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521cb6da-8eb7-a2ac-5bdd-96de18815317" [ 689.338379] env[61545]: _type = "Task" [ 689.338379] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.342352] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65529} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.346093] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3/1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 689.346337] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 689.346726] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47561eaa-b1e5-4bf3-9eef-2fe6dec2a2e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.355695] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521cb6da-8eb7-a2ac-5bdd-96de18815317, 'name': SearchDatastore_Task, 'duration_secs': 0.013649} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.357531] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.357937] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d7ed99e5-3f96-4053-9b9a-a4b7edb1f351/d7ed99e5-3f96-4053-9b9a-a4b7edb1f351.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.358450] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 689.358450] env[61545]: value = "task-4255414" [ 689.358450] env[61545]: _type = "Task" [ 689.358450] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.358834] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da224487-83bd-44c2-9bf9-d8312d96778b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.373865] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255414, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.375270] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 689.375270] env[61545]: value = "task-4255415" [ 689.375270] env[61545]: _type = "Task" [ 689.375270] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.385636] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255415, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.425736] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Successfully updated port: 4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.515275] env[61545]: DEBUG oslo_vmware.api [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255413, 'name': ResetVM_Task, 'duration_secs': 0.376087} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.515563] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Did hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 689.515756] env[61545]: DEBUG nova.compute.manager [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.517132] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79946c59-139c-470d-b6a4-07b0941caf5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.618309] env[61545]: DEBUG oslo_concurrency.lockutils [req-63613e6a-43e5-42f1-81e9-765d4806f7be req-25df9e86-7647-4a53-a119-0c1d595ce325 service nova] Releasing lock "refresh_cache-1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.664289] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.745231] env[61545]: DEBUG nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.773376] env[61545]: DEBUG oslo_vmware.api [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 
tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255404, 'name': PowerOnVM_Task, 'duration_secs': 1.749113} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.774018] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.774128] env[61545]: INFO nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Took 12.78 seconds to spawn the instance on the hypervisor. [ 689.775526] env[61545]: DEBUG nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.775526] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6a7a72-c143-4ae4-b231-40683b4665a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.874472] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255414, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087701} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.874472] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.875199] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73ea32e-b4b4-4462-bb93-2689f05f20a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.912949] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3/1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.918848] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf91bcf3-60be-47ed-974b-92c3cd15c2fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.935770] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255415, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.939353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.939940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquired lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.940237] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.953023] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 689.953023] env[61545]: value = "task-4255416" [ 689.953023] env[61545]: _type = "Task" [ 689.953023] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.967484] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255416, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.988504] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 689.988777] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.988928] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.989539] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.989539] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.989539] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 689.989694] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 689.989771] env[61545]: DEBUG 
nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 689.989930] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 689.990111] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 689.990288] env[61545]: DEBUG nova.virt.hardware [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 689.991291] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed62284d-2583-48e1-a6c2-fa4bd026e3e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.003662] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914897a0-d946-4f4c-8a2c-ec94fbc1594d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.020513] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.028173] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.028509] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.028741] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2c4cdf7-0abd-478b-8e3a-308b832ba813 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.052100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-69096d48-1447-43b2-a853-6f9b7d06b064 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.867s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.061032] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.061032] env[61545]: value = "task-4255417" [ 690.061032] env[61545]: _type = "Task" [ 690.061032] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.070449] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255417, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.150346] env[61545]: DEBUG nova.network.neutron [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updated VIF entry in instance network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.150984] env[61545]: DEBUG nova.network.neutron [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.252103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.149s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.254622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.937s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.258030] env[61545]: INFO nova.compute.claims [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.294045] env[61545]: INFO nova.scheduler.client.report [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Deleted allocations for instance 1a551e66-1b98-44fd-ad16-c20113d9b1a6 [ 690.308541] env[61545]: INFO nova.compute.manager [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Took 27.26 seconds to build instance. 
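The recurring pairs of "Waiting for the task: (returnval){ ... } to complete" followed by "Task: {'id': ..., 'name': ...} progress is N%." entries in this log are produced by oslo.vmware's wait_for_task/_poll_task helpers (api.py:397 and api.py:434 above), which poll a vCenter task handle until it reaches a terminal state. The sketch below shows that invoke-then-poll pattern in a self-contained form; it is not the oslo.vmware implementation, and FakeTask, refresh(), and poll_interval are made-up stand-ins used only to make the example runnable.

# Minimal sketch of the poll-until-done pattern seen in the log.
# NOT the oslo.vmware code: FakeTask, refresh() and poll_interval are
# illustrative assumptions, not real library APIs.
import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Stand-in for a vCenter task handle such as 'task-4255417'."""
    task_id: str
    progress: int = 0

    def refresh(self):
        # A real client would query the vSphere API here; this fake just
        # advances a counter so the loop below terminates.
        self.progress = min(self.progress + 25, 100)
        state = "success" if self.progress == 100 else "running"
        return {"id": self.task_id, "progress": self.progress, "state": state}


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, logging progress
    the way the 'progress is N%' DEBUG entries above do."""
    while True:
        info = task.refresh()
        print(f"Task {info['id']} progress is {info['progress']}%.")
        if info["state"] in ("success", "error"):
            return info
        time.sleep(poll_interval)


if __name__ == "__main__":
    result = wait_for_task(FakeTask("task-4255417"), poll_interval=0.1)
    print(f"Task {result['id']} completed with state {result['state']}.")

Each vCenter operation in the trace (CreateVM_Task, ReconfigVM_Task, ExtendVirtualDisk_Task, CopyVirtualDisk_Task, PowerOnVM_Task, and the SearchDatastore_Task lookups) follows this same invoke-then-poll cycle, which is why a single instance spawn produces many interleaved progress entries from different request contexts.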
[ 690.394607] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255415, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628885} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.394607] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d7ed99e5-3f96-4053-9b9a-a4b7edb1f351/d7ed99e5-3f96-4053-9b9a-a4b7edb1f351.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.395027] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.399626] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57bbbdfd-f8a2-4c9f-99c6-124be0b552e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.411194] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 690.411194] env[61545]: value = "task-4255418" [ 690.411194] env[61545]: _type = "Task" [ 690.411194] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.423812] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.467521] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255416, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.559646] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.578077] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255417, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.656548] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0859202-31aa-439f-8a22-3433ee77cfc0 req-d6b792bd-1aa4-4771-b745-4547024477f7 service nova] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.810857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3c010039-90b7-4d8d-a7ed-a7901b5b9e61 tempest-ServerDiagnosticsNegativeTest-1080856404 tempest-ServerDiagnosticsNegativeTest-1080856404-project-member] Lock "1a551e66-1b98-44fd-ad16-c20113d9b1a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.282s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.814258] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56a55048-04dd-46c7-8874-35b0c6c040ef tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.691s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.924160] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.966886] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255416, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.084559] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255417, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.189064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.189405] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.427155] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.752508} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.431862] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.433580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa332557-aa62-48c6-b548-5c8189a8992d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.466327] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] d7ed99e5-3f96-4053-9b9a-a4b7edb1f351/d7ed99e5-3f96-4053-9b9a-a4b7edb1f351.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.475264] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8a6ae28-b0f8-42f0-93c3-7c96f1416a92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.504481] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255416, 'name': ReconfigVM_Task, 'duration_secs': 1.288838} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.505756] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3/1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.506140] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 691.506140] env[61545]: value = "task-4255419" [ 691.506140] env[61545]: _type = "Task" [ 691.506140] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.506351] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d381fba-2680-44ea-a49b-dd305938d1a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.521617] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255419, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.527947] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 691.527947] env[61545]: value = "task-4255420" [ 691.527947] env[61545]: _type = "Task" [ 691.527947] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.536972] env[61545]: DEBUG nova.network.neutron [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updating instance_info_cache with network_info: [{"id": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "address": "fa:16:3e:66:ef:81", "network": {"id": "415f40c5-693f-4f21-a64d-29a5b40dd50d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-878928979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82cfc8f4bc7d4a6da10e234b077f761f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e58a211-5e", "ovs_interfaceid": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.544262] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255420, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.583280] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255417, 'name': CreateVM_Task, 'duration_secs': 1.259333} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.586953] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.588043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.588043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.588404] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 691.589344] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8beb304b-cb98-4874-8d25-39681675f580 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.596729] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 691.596729] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527f499d-b464-608c-2f31-5bd8e52f896e" [ 691.596729] env[61545]: _type = "Task" [ 691.596729] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.616043] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527f499d-b464-608c-2f31-5bd8e52f896e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.692020] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.799012] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d1c0fd-d880-4c32-9196-06a76ecbf35a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.806998] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2902d68d-26e8-4d1f-8c85-29d93f98b272 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.847619] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef4f9a2-4d09-46a3-af38-ae20dd1ade16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.856018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c800a5-eb1c-4cbc-aab2-b2e16dd6edc5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.871835] env[61545]: DEBUG nova.compute.provider_tree [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.021767] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.047022] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255420, 'name': Rename_Task, 'duration_secs': 0.237681} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.047022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Releasing lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.047178] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Instance network_info: |[{"id": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "address": "fa:16:3e:66:ef:81", "network": {"id": "415f40c5-693f-4f21-a64d-29a5b40dd50d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-878928979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82cfc8f4bc7d4a6da10e234b077f761f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e58a211-5e", "ovs_interfaceid": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 692.047178] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.047317] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:ef:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e58a211-5e27-49a0-a9b2-0cb26978fd99', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.056030] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Creating folder: Project (82cfc8f4bc7d4a6da10e234b077f761f). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.056818] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-035e46bf-88ce-47f4-90aa-c312df18526c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.058759] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b2782f1-a7ae-4d8b-98b8-61f33a064865 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.068878] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 692.068878] env[61545]: value = "task-4255422" [ 692.068878] env[61545]: _type = "Task" [ 692.068878] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.077345] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Created folder: Project (82cfc8f4bc7d4a6da10e234b077f761f) in parent group-v838542. [ 692.077586] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Creating folder: Instances. Parent ref: group-v838586. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.078417] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca380287-d371-4e3c-9d0d-88ab24a5587f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.085915] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.098483] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Created folder: Instances in parent group-v838586. [ 692.098483] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 692.104760] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 692.104760] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4da17c5-b9cc-47a2-a62a-61d9ffe49cbe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.125074] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Successfully updated port: f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.139839] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527f499d-b464-608c-2f31-5bd8e52f896e, 'name': SearchDatastore_Task, 'duration_secs': 0.022937} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.149934] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.149934] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.149934] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.149934] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.150242] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.150242] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.150242] 
env[61545]: value = "task-4255425" [ 692.150242] env[61545]: _type = "Task" [ 692.150242] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.150242] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9874dd99-e346-47f3-b26f-1a5d864dbec2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.170271] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255425, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.174189] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.174581] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.175433] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e648c3d-1a61-4ef1-a0f7-00923c466295 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.186756] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 692.186756] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d1933a-1232-4470-cb27-30e2d6f0887e" [ 692.186756] env[61545]: _type = "Task" [ 692.186756] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.202956] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d1933a-1232-4470-cb27-30e2d6f0887e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.230292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.375374] env[61545]: DEBUG nova.scheduler.client.report [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 692.524105] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255419, 'name': ReconfigVM_Task, 'duration_secs': 0.785822} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.524105] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Reconfigured VM instance instance-0000000d to attach disk [datastore2] d7ed99e5-3f96-4053-9b9a-a4b7edb1f351/d7ed99e5-3f96-4053-9b9a-a4b7edb1f351.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.524867] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13ad8463-7e97-44e5-9a61-83f5ed2b0dbe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.538912] env[61545]: DEBUG nova.compute.manager [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Received event network-vif-plugged-4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 692.539304] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Acquiring lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.539599] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.539790] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.539928] env[61545]: DEBUG nova.compute.manager [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] No waiting events found dispatching network-vif-plugged-4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.540122] env[61545]: WARNING nova.compute.manager [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Received unexpected event network-vif-plugged-4e58a211-5e27-49a0-a9b2-0cb26978fd99 for instance with vm_state building and task_state spawning. [ 692.540291] env[61545]: DEBUG nova.compute.manager [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Received event network-changed-4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 692.540446] env[61545]: DEBUG nova.compute.manager [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Refreshing instance network info cache due to event network-changed-4e58a211-5e27-49a0-a9b2-0cb26978fd99. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 692.540626] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Acquiring lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.540758] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Acquired lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.540908] env[61545]: DEBUG nova.network.neutron [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Refreshing network info cache for port 4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.542685] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 692.542685] env[61545]: value = "task-4255426" [ 692.542685] env[61545]: _type = "Task" [ 692.542685] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.561504] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255426, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.584389] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255422, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.643753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.643916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquired lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.644553] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.665878] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255425, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.704939] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d1933a-1232-4470-cb27-30e2d6f0887e, 'name': SearchDatastore_Task, 'duration_secs': 0.028984} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.708638] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-567c3644-3da0-4000-be83-0dc23c413989 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.717428] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 692.717428] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ef4498-9a2f-4382-ddd7-a9a8c0694ffa" [ 692.717428] env[61545]: _type = "Task" [ 692.717428] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.729577] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ef4498-9a2f-4382-ddd7-a9a8c0694ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.885734] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.886300] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.889225] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.772s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.889461] env[61545]: DEBUG nova.objects.instance [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lazy-loading 'resources' on Instance uuid 5d9eadff-7f13-4720-8119-5829b4802c21 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 693.065943] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255426, 'name': Rename_Task, 'duration_secs': 0.276311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.066544] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.066973] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d00df0dc-9d1e-46c8-b9bb-b162a3a31edd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.083348] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255422, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.083496] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 693.083496] env[61545]: value = "task-4255427" [ 693.083496] env[61545]: _type = "Task" [ 693.083496] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.094868] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.167887] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255425, 'name': CreateVM_Task, 'duration_secs': 0.580188} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.168088] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.169337] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.169506] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.169838] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.170146] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce2c2b13-6511-4841-a442-a0f020dcd371 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.176460] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 693.176460] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bcb011-c9ca-60f7-f860-1c2c7a27547f" [ 693.176460] env[61545]: _type = "Task" [ 693.176460] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.186140] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bcb011-c9ca-60f7-f860-1c2c7a27547f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.237745] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ef4498-9a2f-4382-ddd7-a9a8c0694ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.015257} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.238035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.238312] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.238700] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aed46d3d-c280-4c14-93c0-1214ea29e43f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.248223] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 693.248223] env[61545]: value = "task-4255428" [ 693.248223] env[61545]: _type = "Task" [ 693.248223] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.253027] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.261900] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255428, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.393844] env[61545]: DEBUG nova.compute.utils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.401529] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 693.401792] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 693.594653] env[61545]: DEBUG oslo_vmware.api [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255422, 'name': PowerOnVM_Task, 'duration_secs': 1.066491} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.595563] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.595753] env[61545]: INFO nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Took 14.07 seconds to spawn the instance on the hypervisor. [ 693.595944] env[61545]: DEBUG nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.596934] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fca7b31-9522-470a-8809-bf46d8e8339f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.609765] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255427, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.616879] env[61545]: DEBUG nova.policy [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a926fd4a58ac4f989e04259d46663bc9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f598bcded6824792b972dfec9fc0fa22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.691528] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bcb011-c9ca-60f7-f860-1c2c7a27547f, 'name': SearchDatastore_Task, 'duration_secs': 0.034639} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.691989] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.694021] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.694021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.694021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.694021] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.697758] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feb65450-da5f-4cb3-9c4e-00eac791032e 
{{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.726477] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.726866] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.728990] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adab3892-c298-4a92-b91e-ada0fab3d96b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.741150] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 693.741150] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280804f-0c9d-b21d-e561-2305ada8d0ca" [ 693.741150] env[61545]: _type = "Task" [ 693.741150] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.769945] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280804f-0c9d-b21d-e561-2305ada8d0ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.778017] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255428, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.861744] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdf2cd6-9e40-4f75-aede-7e01dd284a43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.874369] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d0b8d5-0937-4ae9-af51-170cb500588b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.915742] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.923329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e37ce2-28dd-47b2-8df0-87356a99fe63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.933235] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287652e9-62df-4c0f-a616-fc4e0573aa94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.949654] env[61545]: DEBUG nova.compute.provider_tree [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.959794] env[61545]: DEBUG nova.network.neutron [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Updating instance_info_cache with network_info: [{"id": "f3a5e417-6468-4590-aa5e-e24c16364727", "address": "fa:16:3e:21:f3:87", "network": {"id": "9faf910d-15c6-4c14-95e6-1cad711ba111", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1263074816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d4e84d3c1e741aa985cf8f6527112b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a5e417-64", "ovs_interfaceid": "f3a5e417-6468-4590-aa5e-e24c16364727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.042475] env[61545]: DEBUG nova.network.neutron [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updated VIF entry in instance network info cache for port 4e58a211-5e27-49a0-a9b2-0cb26978fd99. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.042986] env[61545]: DEBUG nova.network.neutron [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updating instance_info_cache with network_info: [{"id": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "address": "fa:16:3e:66:ef:81", "network": {"id": "415f40c5-693f-4f21-a64d-29a5b40dd50d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-878928979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82cfc8f4bc7d4a6da10e234b077f761f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e58a211-5e", "ovs_interfaceid": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.098799] env[61545]: DEBUG oslo_vmware.api [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255427, 'name': PowerOnVM_Task, 'duration_secs': 0.937768} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.101462] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.101596] env[61545]: INFO nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Took 12.11 seconds to spawn the instance on the hypervisor. 
[ 694.102144] env[61545]: DEBUG nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.103386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fb8c5b-4597-427c-9a12-2ed73d4a3254 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.142487] env[61545]: INFO nova.compute.manager [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Took 30.53 seconds to build instance. [ 694.268082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.268082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.269225] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280804f-0c9d-b21d-e561-2305ada8d0ca, 'name': SearchDatastore_Task, 'duration_secs': 0.070067} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.273415] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5566c716-02e6-47a6-bae3-4f9e22f552b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.287438] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7117} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.287942] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.288202] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.288468] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34045568-d149-4cbc-9afb-357d72e84320 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.297486] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 694.297486] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dcbe4e-117b-4ba7-f330-d3c3b957bb2c" [ 694.297486] env[61545]: _type = "Task" [ 694.297486] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.304624] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 694.304624] env[61545]: value = "task-4255430" [ 694.304624] env[61545]: _type = "Task" [ 694.304624] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.323486] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dcbe4e-117b-4ba7-f330-d3c3b957bb2c, 'name': SearchDatastore_Task, 'duration_secs': 0.013272} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.324996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.327093] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b2579785-d1a4-48da-ba27-6ee3098578f1/b2579785-d1a4-48da-ba27-6ee3098578f1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 694.327852] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e38ee697-7b41-441a-a235-a2cdc419e4e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.334663] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.340593] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 694.340593] env[61545]: value = "task-4255431" [ 694.340593] env[61545]: _type = "Task" [ 694.340593] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.354530] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255431, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.455794] env[61545]: DEBUG nova.scheduler.client.report [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.464620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Releasing lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.465747] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Instance network_info: |[{"id": "f3a5e417-6468-4590-aa5e-e24c16364727", "address": "fa:16:3e:21:f3:87", "network": {"id": "9faf910d-15c6-4c14-95e6-1cad711ba111", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1263074816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d4e84d3c1e741aa985cf8f6527112b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a5e417-64", "ovs_interfaceid": "f3a5e417-6468-4590-aa5e-e24c16364727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.466164] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:f3:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3a5e417-6468-4590-aa5e-e24c16364727', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.473585] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 
tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Creating folder: Project (6d4e84d3c1e741aa985cf8f6527112b2). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.479882] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f1f5266-9a8d-400a-a778-1c5ef3b09655 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.490830] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Created folder: Project (6d4e84d3c1e741aa985cf8f6527112b2) in parent group-v838542. [ 694.491084] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Creating folder: Instances. Parent ref: group-v838589. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.491712] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ef21aa7-a1ce-4190-945e-972c2c14073d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.504834] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Created folder: Instances in parent group-v838589. [ 694.505114] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.505354] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.505629] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64c9b90e-7060-45da-8432-9d1b37ba587c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.530704] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.530704] env[61545]: value = "task-4255434" [ 694.530704] env[61545]: _type = "Task" [ 694.530704] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.543336] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255434, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.545058] env[61545]: DEBUG oslo_concurrency.lockutils [req-700cf197-004a-421f-8d25-8dbe51d672de req-4a929db2-3b73-4995-8ac7-895528ad7b50 service nova] Releasing lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.630707] env[61545]: INFO nova.compute.manager [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Took 29.94 seconds to build instance. [ 694.648985] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6703c4a9-a33a-4fe7-a1f0-0e290b8b5913 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.284s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.772645] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 694.780108] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "4f879b20-bae0-4d50-b5e9-378356341962" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.783664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.783664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "4f879b20-bae0-4d50-b5e9-378356341962-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.783664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.783664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 
tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.783938] env[61545]: INFO nova.compute.manager [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Terminating instance [ 694.820822] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.151311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.821532] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.822353] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7004d64e-06fc-42db-ade0-9125868229da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.857248] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.862317] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce293b16-2f4c-42a8-bedd-50d9e206132f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.882220] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Successfully created port: 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.892253] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255431, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.894855] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 694.894855] env[61545]: value = "task-4255435" [ 694.894855] env[61545]: _type = "Task" [ 694.894855] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.909551] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255435, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.936641] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.962411] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.965027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.155s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.980504] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:46:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='615513059',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1340445915',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.980787] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Flavor limits 0:0:0 
{{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.980971] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.981715] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.981896] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.982058] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.982276] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.982440] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.982592] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.982751] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.983599] env[61545]: DEBUG nova.virt.hardware [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.984266] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea70e653-3825-4c2d-8ff9-da0780d4bc76 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.000220] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd0b059-5e2b-409f-8aa5-7fded7a7d15b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.045034] env[61545]: INFO nova.scheduler.client.report [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Deleted allocations for instance 5d9eadff-7f13-4720-8119-5829b4802c21 [ 695.059388] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255434, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.136948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c878224f-c699-4fd0-bf7c-7c7fa8662205 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.474s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.289474] env[61545]: DEBUG nova.compute.manager [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.291485] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.291485] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6034a95a-1ebf-4efd-a944-a481d658d29d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.295493] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.303708] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.304013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd795881-636b-4137-a72b-73b44ccf626c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.313325] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 
tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 695.313325] env[61545]: value = "task-4255436" [ 695.313325] env[61545]: _type = "Task" [ 695.313325] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.323450] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255436, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.355699] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.736428} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.356015] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b2579785-d1a4-48da-ba27-6ee3098578f1/b2579785-d1a4-48da-ba27-6ee3098578f1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.356307] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.356772] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a2bea40-be66-4e17-a4be-3a8fbd5b34c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.367297] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 695.367297] env[61545]: value = "task-4255437" [ 695.367297] env[61545]: _type = "Task" [ 695.367297] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.382041] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255437, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.409851] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255435, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.473660] env[61545]: INFO nova.compute.claims [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.552724] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255434, 'name': CreateVM_Task, 'duration_secs': 0.961803} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.552724] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 695.554286] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.554286] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.554286] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 695.557673] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5227adc0-a46e-4a46-a9be-77b5fa6a0ec7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.560689] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6e7a0588-ce08-40d9-aa3a-c42ad94bf420 tempest-ServerDiagnosticsTest-463569044 tempest-ServerDiagnosticsTest-463569044-project-member] Lock "5d9eadff-7f13-4720-8119-5829b4802c21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.070s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.568684] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 695.568684] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c5a46e-b315-b5bb-145b-2c9aaf352e0b" [ 695.568684] env[61545]: _type = "Task" [ 695.568684] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.578503] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c5a46e-b315-b5bb-145b-2c9aaf352e0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.832242] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255436, 'name': PowerOffVM_Task, 'duration_secs': 0.280178} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.832242] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.832242] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.832242] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dfde9a1-cfbd-4dd9-a79d-e75f9f1865e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.883812] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087159} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.888555] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.890240] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd8ddf6-cce4-45ef-a440-28f2da47cd9b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.915375] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] b2579785-d1a4-48da-ba27-6ee3098578f1/b2579785-d1a4-48da-ba27-6ee3098578f1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.920101] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb2316e5-f8bd-4d56-b02a-dd2a77e33d2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.935161] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.935161] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.935396] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Deleting the datastore file [datastore2] 4f879b20-bae0-4d50-b5e9-378356341962 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.935789] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8b2baf3-70d6-4ade-ad81-2676d28af4cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.945029] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for the task: (returnval){ [ 695.945029] env[61545]: value = "task-4255440" [ 695.945029] env[61545]: _type = "Task" [ 695.945029] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.949264] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255435, 'name': ReconfigVM_Task, 'duration_secs': 0.709033} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.949543] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 695.949543] env[61545]: value = "task-4255441" [ 695.949543] env[61545]: _type = "Task" [ 695.949543] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.952779] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.953564] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0ff37c0-0847-4371-9160-73c2a422cfb9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.965167] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255441, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.969437] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 695.969437] env[61545]: value = "task-4255442" [ 695.969437] env[61545]: _type = "Task" [ 695.969437] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.969736] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255440, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.980101] env[61545]: INFO nova.compute.resource_tracker [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating resource usage from migration 411a6b97-0992-4370-953c-53dc6fad8c98 [ 695.983294] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255442, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.090527] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c5a46e-b315-b5bb-145b-2c9aaf352e0b, 'name': SearchDatastore_Task, 'duration_secs': 0.043693} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.090527] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.090527] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 696.090527] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.090818] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.090818] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.090818] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9446fa4e-8232-4884-a3f0-a367a7a9a5ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.107209] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.107209] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 696.116495] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-867179d4-f4d2-4e75-a823-bbbbbab1f7e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.125042] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 696.125042] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cda2-28cf-c446-ab09-38fc737fe802" [ 696.125042] env[61545]: _type = "Task" [ 696.125042] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.135266] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cda2-28cf-c446-ab09-38fc737fe802, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.411740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.412055] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.413900] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b88100-5f5c-4ce0-bf12-68e58cf84bd1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.423658] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c20cfe3-e7a1-42be-9365-a51d696973c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.467450] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6b3559-b98f-43d2-b434-c080ad8c4e74 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.483413] env[61545]: DEBUG oslo_vmware.api [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Task: {'id': task-4255440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396329} completed successfully. 
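Annotation, not part of the captured log: the paired 'Acquiring lock "..." by "..."' / 'acquired ... waited' / '"released" ... held' entries, and the Acquiring/Acquired/Releasing entries around the image-cache path, come from oslo.concurrency's named locks. A minimal sketch of the two usages, with placeholder lock names:

    # Illustrative sketch only, not Nova source code.
    from oslo_concurrency import lockutils

    # Decorator form: produces the 'Acquiring lock "..." by "..."' /
    # 'acquired ... :: waited Ns' / '"released" ... :: held Ns' triplet.
    @lockutils.synchronized('<instance-uuid>')
    def _locked_do_build_and_run_instance():
        pass  # body runs only while the named lock is held

    # Context-manager form: produces the plain Acquiring/Acquired/Releasing
    # entries, as seen around the devstack-image-cache_base path above.
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>'):
        pass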
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.483413] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255441, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.484736] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.484935] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 696.485137] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.485324] env[61545]: INFO nova.compute.manager [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Took 1.20 seconds to destroy the instance on the hypervisor. [ 696.485559] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.486840] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f63e26-20a6-41fe-93af-340000f43bd2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.492364] env[61545]: DEBUG nova.compute.manager [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 696.492531] env[61545]: DEBUG nova.network.neutron [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.500890] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255442, 'name': Rename_Task, 'duration_secs': 0.267364} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.508780] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.509370] env[61545]: DEBUG nova.compute.provider_tree [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.510628] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06e14916-611b-442e-b719-d6174ca58edf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.519108] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 696.519108] env[61545]: value = "task-4255443" [ 696.519108] env[61545]: _type = "Task" [ 696.519108] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.529510] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.641317] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cda2-28cf-c446-ab09-38fc737fe802, 'name': SearchDatastore_Task, 'duration_secs': 0.026101} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.643635] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dd475ed-b8c9-401b-acc1-a259edc9d621 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.652282] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 696.652282] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52718d29-6178-a0eb-5046-a17ef8542144" [ 696.652282] env[61545]: _type = "Task" [ 696.652282] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.667985] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52718d29-6178-a0eb-5046-a17ef8542144, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.918307] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.972188] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255441, 'name': ReconfigVM_Task, 'duration_secs': 0.683982} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.972522] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Reconfigured VM instance instance-0000000e to attach disk [datastore2] b2579785-d1a4-48da-ba27-6ee3098578f1/b2579785-d1a4-48da-ba27-6ee3098578f1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.974413] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-734c1c00-7230-4f78-8041-d1844f912fd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.981904] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 696.981904] env[61545]: value = "task-4255444" [ 696.981904] env[61545]: _type = "Task" [ 696.981904] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.994203] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255444, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.013616] env[61545]: DEBUG nova.scheduler.client.report [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 697.033230] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255443, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.166629] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52718d29-6178-a0eb-5046-a17ef8542144, 'name': SearchDatastore_Task, 'duration_secs': 0.013448} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.169147] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.169147] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8214216a-0256-467e-ac4c-1d14b0f73b77/8214216a-0256-467e-ac4c-1d14b0f73b77.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 697.169147] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26b2f897-16c5-4efb-8656-a13200f0e724 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.179022] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 697.179022] env[61545]: value = "task-4255445" [ 697.179022] env[61545]: _type = "Task" [ 697.179022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.190852] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255445, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.366437] env[61545]: DEBUG nova.compute.manager [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Received event network-vif-plugged-f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 697.370022] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Acquiring lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.370022] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.370022] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.370022] env[61545]: DEBUG nova.compute.manager [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] No waiting events found dispatching network-vif-plugged-f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.370022] env[61545]: WARNING nova.compute.manager [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Received unexpected event network-vif-plugged-f3a5e417-6468-4590-aa5e-e24c16364727 for instance with vm_state building and task_state spawning. [ 697.370405] env[61545]: DEBUG nova.compute.manager [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Received event network-changed-f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 697.370405] env[61545]: DEBUG nova.compute.manager [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Refreshing instance network info cache due to event network-changed-f3a5e417-6468-4590-aa5e-e24c16364727. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 697.370405] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Acquiring lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.370405] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Acquired lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.370405] env[61545]: DEBUG nova.network.neutron [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Refreshing network info cache for port f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.400202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.400202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.437154] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.437154] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.452732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.497544] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f 
tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255444, 'name': Rename_Task, 'duration_secs': 0.299612} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.497544] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 697.497544] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce3257be-11e2-49f8-9d44-8da1c3f6fd67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.505049] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 697.505049] env[61545]: value = "task-4255446" [ 697.505049] env[61545]: _type = "Task" [ 697.505049] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.513918] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.523031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.555s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.523031] env[61545]: INFO nova.compute.manager [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Migrating [ 697.523031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.523031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.523031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.666s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
697.523352] env[61545]: DEBUG nova.objects.instance [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lazy-loading 'resources' on Instance uuid 256e48c1-81de-4d32-97dc-ba80541a9239 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 697.539220] env[61545]: DEBUG oslo_vmware.api [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255443, 'name': PowerOnVM_Task, 'duration_secs': 0.546672} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.539795] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.540207] env[61545]: DEBUG nova.compute.manager [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.544027] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acd6e6c-e723-495a-9b08-105af2d323bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.691408] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255445, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.853419] env[61545]: DEBUG nova.compute.manager [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Received event network-changed-400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 697.853601] env[61545]: DEBUG nova.compute.manager [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Refreshing instance network info cache due to event network-changed-400a0ea3-0087-4d35-bc44-2849c40231e6. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 697.853794] env[61545]: DEBUG oslo_concurrency.lockutils [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] Acquiring lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.854016] env[61545]: DEBUG oslo_concurrency.lockutils [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] Acquired lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.855689] env[61545]: DEBUG nova.network.neutron [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Refreshing network info cache for port 400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.902790] env[61545]: DEBUG nova.network.neutron [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.905659] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.017894] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255446, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.027852] env[61545]: INFO nova.compute.rpcapi [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 698.028859] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.066542] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Successfully updated port: 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 698.066542] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.196657] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676013} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.197300] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8214216a-0256-467e-ac4c-1d14b0f73b77/8214216a-0256-467e-ac4c-1d14b0f73b77.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 698.199534] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 698.199534] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a09eea8c-732c-4d06-88b5-638f0a48f10b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.220059] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 698.220059] env[61545]: value = "task-4255448" [ 698.220059] env[61545]: _type = "Task" [ 698.220059] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.231380] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255448, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.393875] env[61545]: DEBUG nova.network.neutron [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Updated VIF entry in instance network info cache for port f3a5e417-6468-4590-aa5e-e24c16364727. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.394282] env[61545]: DEBUG nova.network.neutron [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Updating instance_info_cache with network_info: [{"id": "f3a5e417-6468-4590-aa5e-e24c16364727", "address": "fa:16:3e:21:f3:87", "network": {"id": "9faf910d-15c6-4c14-95e6-1cad711ba111", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1263074816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d4e84d3c1e741aa985cf8f6527112b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3a5e417-64", "ovs_interfaceid": "f3a5e417-6468-4590-aa5e-e24c16364727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.415818] env[61545]: INFO nova.compute.manager [-] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Took 1.92 seconds to deallocate network for instance. [ 698.449485] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.522246] env[61545]: DEBUG oslo_vmware.api [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255446, 'name': PowerOnVM_Task, 'duration_secs': 0.873075} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.525899] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 698.526320] env[61545]: INFO nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Took 13.94 seconds to spawn the instance on the hypervisor. [ 698.526889] env[61545]: DEBUG nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 698.528396] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a166cd41-b00a-4241-afdd-cfa4b3d1249e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.555034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.555267] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.555453] env[61545]: DEBUG nova.network.neutron [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.567704] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.567924] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.568076] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 
e8c954ec-de76-4d3e-9a63-6c30523d5b63] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.586528] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c031d8b-9957-4e9f-b890-3c50c532feb8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.599998] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d85652-b2fe-42bd-af0e-4d81a1f86494 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.643666] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "4b29ebc4-d913-447c-bc57-890953cf8d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.644473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.645164] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a846631-3621-425d-93b3-5f233455d86f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.655375] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e84f837-de4e-42ba-b773-79d1b454f623 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.671469] env[61545]: DEBUG nova.compute.provider_tree [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.730988] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114644} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.733805] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 698.734779] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb81c41-cb9e-4d61-8abf-1302ca114cfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.760749] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 8214216a-0256-467e-ac4c-1d14b0f73b77/8214216a-0256-467e-ac4c-1d14b0f73b77.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.760749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afc9537a-8e2a-411f-abb6-216dc4704caf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.781353] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 698.781353] env[61545]: value = "task-4255449" [ 698.781353] env[61545]: _type = "Task" [ 698.781353] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.794179] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255449, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.899162] env[61545]: DEBUG oslo_concurrency.lockutils [req-311efad5-c01d-402f-890c-ce07ca9a3004 req-ad6da71d-5442-40db-981b-05c450fd9119 service nova] Releasing lock "refresh_cache-8214216a-0256-467e-ac4c-1d14b0f73b77" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.934197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.944190] env[61545]: DEBUG nova.network.neutron [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updated VIF entry in instance network info cache for port 400a0ea3-0087-4d35-bc44-2849c40231e6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.944640] env[61545]: DEBUG nova.network.neutron [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updating instance_info_cache with network_info: [{"id": "400a0ea3-0087-4d35-bc44-2849c40231e6", "address": "fa:16:3e:24:ec:a6", "network": {"id": "5c67fa2d-8198-40b5-a811-d302e1d825c1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-800705450-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d8c8b7d5250486a902f8655029c4f97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap400a0ea3-00", "ovs_interfaceid": "400a0ea3-0087-4d35-bc44-2849c40231e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.975309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.975605] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.975825] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.976055] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.976274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.979033] env[61545]: INFO nova.compute.manager [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Terminating instance [ 699.056167] env[61545]: INFO nova.compute.manager [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Took 33.23 seconds to build instance. [ 699.206847] env[61545]: ERROR nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] [req-0e912a74-8a69-4c62-ab2c-f2d24861bb34] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0e912a74-8a69-4c62-ab2c-f2d24861bb34"}]} [ 699.227466] env[61545]: DEBUG nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 699.241275] env[61545]: DEBUG nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 699.241677] env[61545]: DEBUG nova.compute.provider_tree [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.255615] env[61545]: DEBUG nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 699.255615] env[61545]: DEBUG nova.compute.provider_tree [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 34 to 35 during operation: update_aggregates {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 699.283543] env[61545]: DEBUG nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 699.305565] env[61545]: DEBUG oslo_vmware.api [None 
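Annotation, not part of the captured log: the 409 "placement.concurrent_update" response above is Placement's generation check rejecting a write made against a stale resource-provider generation; the client then refreshes the provider (the generation moves from 34 to 35 in the entries that follow) and retries. A hypothetical, simplified sketch of that compare-and-swap loop over the Placement HTTP API follows; the endpoint, token and retry count are placeholders, and this is not Nova's report client.

    # Hypothetical sketch of a generation-checked inventory update; Nova's
    # real client lives in nova/scheduler/client/report.py.
    import requests

    PLACEMENT = 'https://placement.example.org'            # placeholder endpoint
    HEADERS = {'x-auth-token': '<token>',                   # placeholder auth
               'openstack-api-version': 'placement 1.26'}

    def set_inventory(provider_uuid, inventories, retries=3):
        url = f'{PLACEMENT}/resource_providers/{provider_uuid}/inventories'
        for _ in range(retries):
            # Read the provider's current generation ...
            current = requests.get(url, headers=HEADERS).json()
            payload = {'resource_provider_generation':
                           current['resource_provider_generation'],
                       'inventories': inventories}
            # ... and write back against it; a concurrent writer bumps the
            # generation and the PUT comes back 409 placement.concurrent_update.
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                return resp
        raise RuntimeError('inventory update kept hitting generation conflicts')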
req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.405272] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.448284] env[61545]: DEBUG oslo_concurrency.lockutils [req-12929412-0ad3-4fbd-af6c-a9026e235a13 req-1fa86761-0aa5-4e53-9955-8f94ffc58cf4 service nova] Releasing lock "refresh_cache-0554c462-1dc5-4043-94ac-7a3d28ed05e1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.489026] env[61545]: DEBUG nova.compute.manager [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 699.489026] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.489026] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1445c9-3c70-4d57-9099-b2abe3321e31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.497858] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 699.500852] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad49b932-e84a-4486-b8e0-02a8672dfc83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.508763] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 699.508763] env[61545]: value = "task-4255450" [ 699.508763] env[61545]: _type = "Task" [ 699.508763] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.518911] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255450, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.559017] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1ad0cabc-b1eb-413b-b2c2-d53c03434c9f tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.771s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.783568] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.783897] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.805109] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255449, 'name': ReconfigVM_Task, 'duration_secs': 0.921405} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.805842] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 8214216a-0256-467e-ac4c-1d14b0f73b77/8214216a-0256-467e-ac4c-1d14b0f73b77.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.807961] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5e177a4-34d4-4183-89ca-23339844a79c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.814097] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 699.814097] env[61545]: value = "task-4255451" [ 699.814097] env[61545]: _type = "Task" [ 699.814097] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.829467] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255451, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.846783] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d47fac2-58ad-41c9-ba3b-46e95cbdb267 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.861323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576ec15a-26a9-49ff-b874-6fef9fb4280c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.128781] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec85c4b-f96f-4882-8397-1ca6c6587524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.128781] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.146781] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3c593c-9113-412d-824b-a6bc73d2e049 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.171471] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255450, 'name': PowerOffVM_Task, 'duration_secs': 0.236926} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.171868] env[61545]: DEBUG nova.compute.provider_tree [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.183118] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.183118] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.183118] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-677a6180-b279-4f2f-b100-e913eacf57e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.250761] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 700.250761] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 700.250761] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Deleting the datastore file [datastore2] 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.250761] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d65e7fc6-db7d-497d-ab05-da25c5527662 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.259693] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for the task: (returnval){ [ 700.259693] env[61545]: value = "task-4255454" [ 700.259693] env[61545]: _type = "Task" [ 700.259693] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.268956] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.312285] env[61545]: DEBUG nova.network.neutron [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updating instance_info_cache with network_info: [{"id": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "address": "fa:16:3e:c7:9e:5f", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f2ef34-f0", "ovs_interfaceid": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.331429] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255451, 'name': Rename_Task, 'duration_secs': 0.315476} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.331530] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.331780] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f30ff206-7172-4499-b7b5-f27b6aa73bd2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.342352] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 700.342352] env[61545]: value = "task-4255455" [ 700.342352] env[61545]: _type = "Task" [ 700.342352] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.357030] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.434158] env[61545]: DEBUG nova.network.neutron [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.669416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.684459] env[61545]: DEBUG nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.772298] env[61545]: DEBUG oslo_vmware.api [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Task: {'id': task-4255454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207167} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.772298] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.772668] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 700.772668] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.772880] env[61545]: INFO nova.compute.manager [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Took 1.29 seconds to destroy the instance on the hypervisor. [ 700.772928] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.773410] env[61545]: DEBUG nova.compute.manager [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 700.773410] env[61545]: DEBUG nova.network.neutron [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.815810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.817986] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Instance network_info: |[{"id": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "address": "fa:16:3e:c7:9e:5f", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f2ef34-f0", "ovs_interfaceid": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 700.818433] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:9e:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1cf14cf-4f9c-41af-90d0-62e363eb4fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53f2ef34-f0c8-46d5-86d1-b21ecc745ad5', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.829135] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Creating folder: Project (f598bcded6824792b972dfec9fc0fa22). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.829737] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d895ce39-c4f2-43f6-ba07-70e472f3059e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.847351] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Created folder: Project (f598bcded6824792b972dfec9fc0fa22) in parent group-v838542. [ 700.847940] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Creating folder: Instances. Parent ref: group-v838593. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.852031] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6effc606-0e1c-4c10-92ad-04660e1c6615 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.857139] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255455, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.874020] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Created folder: Instances in parent group-v838593. [ 700.874020] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.874020] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.874020] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2d83032-38d0-4316-afc7-85e015ba75c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.897338] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.897338] env[61545]: value = "task-4255458" [ 700.897338] env[61545]: _type = "Task" [ 700.897338] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.907546] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255458, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.937767] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.192434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.670s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.198679] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.516s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.202385] env[61545]: INFO nova.compute.claims [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.251360] env[61545]: INFO nova.scheduler.client.report [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Deleted allocations for instance 256e48c1-81de-4d32-97dc-ba80541a9239 [ 701.356308] env[61545]: DEBUG oslo_vmware.api [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255455, 'name': PowerOnVM_Task, 'duration_secs': 0.991413} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.357033] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 701.357033] env[61545]: INFO nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Took 14.27 seconds to spawn the instance on the hypervisor. 
[ 701.357033] env[61545]: DEBUG nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.359651] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3a4470-1712-40a7-b104-3800f47abff6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.411042] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255458, 'name': CreateVM_Task, 'duration_secs': 0.41575} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.411392] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.412308] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.412624] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.413437] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 701.413877] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58c681cc-53d1-43e0-a428-fae1c3ce5e08 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.422588] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 701.422588] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526ee784-91c9-0f12-8500-28ea7c2ce08e" [ 701.422588] env[61545]: _type = "Task" [ 701.422588] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.433796] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526ee784-91c9-0f12-8500-28ea7c2ce08e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.745489] env[61545]: DEBUG nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Received event network-vif-deleted-4823df3e-d5b5-411f-b835-7dddc654e899 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 701.745489] env[61545]: DEBUG nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Received event network-vif-plugged-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 701.745489] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Acquiring lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.745489] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.745670] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.745748] env[61545]: DEBUG nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] No waiting events found dispatching network-vif-plugged-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.745908] env[61545]: WARNING nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Received unexpected event network-vif-plugged-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 for instance with vm_state building and task_state spawning. 
[ 701.746075] env[61545]: DEBUG nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Received event network-changed-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 701.746242] env[61545]: DEBUG nova.compute.manager [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Refreshing instance network info cache due to event network-changed-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 701.746407] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Acquiring lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.746542] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Acquired lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.746837] env[61545]: DEBUG nova.network.neutron [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Refreshing network info cache for port 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.766946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4f5f0886-86d0-4e7c-a954-8371c8142684 tempest-TenantUsagesTestJSON-197157338 tempest-TenantUsagesTestJSON-197157338-project-member] Lock "256e48c1-81de-4d32-97dc-ba80541a9239" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.352s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.891275] env[61545]: INFO nova.compute.manager [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Took 34.44 seconds to build instance. [ 701.936576] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526ee784-91c9-0f12-8500-28ea7c2ce08e, 'name': SearchDatastore_Task, 'duration_secs': 0.014631} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.936576] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.937391] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.937923] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.938535] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.938939] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.939683] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e77845c-bffc-46fd-bb90-89eba7dcf7aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.958842] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.959174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.960234] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-673190eb-c0e0-4a5b-bd1d-c718feff4e6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.967824] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 701.967824] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ac1783-8b81-d50a-44ea-f20b8267427e" [ 701.967824] env[61545]: _type = "Task" [ 701.967824] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.979148] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ac1783-8b81-d50a-44ea-f20b8267427e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.125546] env[61545]: DEBUG nova.network.neutron [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.397178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ab83ea-0b63-44af-885e-c5f26269aa06 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.327s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.467697] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8800a7-1511-4491-b1f3-b03ce46d0402 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.518298] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 702.526191] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ac1783-8b81-d50a-44ea-f20b8267427e, 'name': SearchDatastore_Task, 'duration_secs': 0.0154} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.528090] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a7656c8-77c2-4b60-9b9d-c1481f01cad9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.543063] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 702.543063] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52da5bc3-98d3-6161-6292-d5876b9020de" [ 702.543063] env[61545]: _type = "Task" [ 702.543063] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.556395] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52da5bc3-98d3-6161-6292-d5876b9020de, 'name': SearchDatastore_Task, 'duration_secs': 0.014902} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.556395] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.556915] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e8c954ec-de76-4d3e-9a63-6c30523d5b63/e8c954ec-de76-4d3e-9a63-6c30523d5b63.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 702.560138] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dc9bd4b-3105-4ff5-bb16-ee8d149e4bea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.571508] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 702.571508] env[61545]: value = "task-4255460" [ 702.571508] env[61545]: _type = "Task" [ 702.571508] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.588051] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.630730] env[61545]: INFO nova.compute.manager [-] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Took 1.86 seconds to deallocate network for instance. [ 702.648875] env[61545]: INFO nova.compute.manager [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Rebuilding instance [ 702.727835] env[61545]: DEBUG nova.compute.manager [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.732581] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e73335-5a10-4227-a977-69aca3e145ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.814746] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2385988-4866-4c6e-9bda-456f8ee6c911 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.823954] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb16fac-5900-47ed-ad15-dfa2b09b6bc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.865035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0cb01b-323f-4ab3-b492-ad48d8c7ec4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.874535] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c34cb72-07f3-434f-a310-ca35aad3c694 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.898757] env[61545]: DEBUG nova.compute.provider_tree [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.900453] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 703.029711] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.030491] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39eaec24-5a9f-4282-a5b2-dd9036841686 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.039811] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 703.039811] env[61545]: value = "task-4255461" [ 703.039811] env[61545]: _type = "Task" [ 703.039811] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.051469] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.085050] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255460, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.154830] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.177604] env[61545]: DEBUG nova.network.neutron [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updated VIF entry in instance network info cache for port 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 703.178234] env[61545]: DEBUG nova.network.neutron [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updating instance_info_cache with network_info: [{"id": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "address": "fa:16:3e:c7:9e:5f", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f2ef34-f0", "ovs_interfaceid": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.409321] env[61545]: DEBUG nova.scheduler.client.report [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 703.451315] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.554829] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255461, 'name': PowerOffVM_Task, 'duration_secs': 0.333813} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.555535] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.555535] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 703.582318] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.905502} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.582622] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e8c954ec-de76-4d3e-9a63-6c30523d5b63/e8c954ec-de76-4d3e-9a63-6c30523d5b63.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 703.583150] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 703.583150] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58bfcd04-38ce-428b-9078-852c83c81115 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.592723] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 703.592723] env[61545]: value = "task-4255462" [ 703.592723] env[61545]: _type = "Task" [ 703.592723] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.605537] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255462, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.687134] env[61545]: DEBUG oslo_concurrency.lockutils [req-8d62a444-d4d3-4af4-b0e3-6e34110f174a req-b565869c-65a6-4944-bbdc-a5a2e4b3cb97 service nova] Releasing lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.753445] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.753687] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8848f1b7-1812-4652-9967-b85556b35615 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.762667] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 703.762667] env[61545]: value = "task-4255463" [ 703.762667] env[61545]: _type = "Task" [ 703.762667] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.780186] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.925592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.925592] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 703.926212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.379s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.927654] env[61545]: INFO nova.compute.claims [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.062646] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.064291] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.064576] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.064773] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.064917] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.065061] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.065330] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.065688] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.065688] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.065849] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.066027] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.073198] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efead86c-cc41-4aa5-9dc3-f39d77b62129 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.102157] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 704.102157] env[61545]: value = "task-4255465" [ 704.102157] env[61545]: _type = "Task" [ 704.102157] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.114794] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.152395} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.115950] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 704.116997] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4e4dd0-7877-4aeb-890f-942dc914ac89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.125060] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.148016] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] e8c954ec-de76-4d3e-9a63-6c30523d5b63/e8c954ec-de76-4d3e-9a63-6c30523d5b63.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 704.148388] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8724cc3d-8937-4e42-bc53-428ee3bc42a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.172772] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 704.172772] env[61545]: value = "task-4255466" [ 704.172772] env[61545]: _type = "Task" [ 704.172772] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.187021] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.276054] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255463, 'name': PowerOffVM_Task, 'duration_secs': 0.265767} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.276606] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.276695] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.279080] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65659634-dffa-4ff5-b29b-f3036070761e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.287332] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.287962] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38e31687-0453-4532-9070-3c812b8eda3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.320036] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.320149] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.320461] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Deleting the datastore file [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.320654] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73a4105f-529f-4fa5-971b-fd929d46b036 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.332892] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 704.332892] env[61545]: value = "task-4255468" [ 704.332892] env[61545]: _type = "Task" [ 704.332892] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.352746] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255468, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.436771] env[61545]: DEBUG nova.compute.utils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 704.438392] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 704.438623] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 704.481139] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.481467] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.614985] env[61545]: DEBUG nova.policy [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113fc58985704b0b9e0a28be2f61cd68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9007a6e389c0467c8e2077309984eaab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 704.625940] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.690303] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255466, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.843865] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166875} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.844048] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.844292] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.844754] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.896814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "1722d63d-e604-44fe-8198-13e6c5bce016" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.897077] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.947675] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.118695] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255465, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.195292] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255466, 'name': ReconfigVM_Task, 'duration_secs': 0.532183} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.199186] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Reconfigured VM instance instance-00000010 to attach disk [datastore2] e8c954ec-de76-4d3e-9a63-6c30523d5b63/e8c954ec-de76-4d3e-9a63-6c30523d5b63.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.200053] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05f2a5c8-53dd-486f-aa75-3dd10e658805 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.209267] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 705.209267] env[61545]: value = "task-4255469" [ 705.209267] env[61545]: _type = "Task" [ 705.209267] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.218281] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255469, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.498198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43423b05-6214-4e2b-88a8-efb05924c53c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.506743] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45b11e1-c80b-4978-a51d-421007818662 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.539965] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b2f834-6036-48d4-a830-e24f62bccaf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.548829] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717415e4-d128-4cc8-9b5d-eda577058ab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.568480] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.622034] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255465, 'name': ReconfigVM_Task, 'duration_secs': 1.245688} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.622034] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 705.726176] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255469, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.727993] env[61545]: DEBUG nova.compute.manager [req-13b4cb10-71b7-420f-b559-4f8a03c68769 req-0141fa5c-ad49-45df-899d-50363f31bf1f service nova] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Received event network-vif-deleted-2c39c356-8b63-44c6-9bcf-46f4765b0c4b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 705.890809] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.891438] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.891438] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.891438] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.891589] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.891646] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.891863] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.891997] env[61545]: DEBUG nova.virt.hardware 
[None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.892181] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.892345] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.892871] env[61545]: DEBUG nova.virt.hardware [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.894287] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55d9a83-f04b-4497-84f6-d172b9209f51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.909108] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd31a29a-4ea1-43b9-9794-cf58905739a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.935535] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.942025] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.943639] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.943639] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f72dbe9-5918-4d26-a427-c101fea5a44e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.960348] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 705.964277] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.964277] env[61545]: value = "task-4255470" [ 705.964277] env[61545]: _type = "Task" [ 705.964277] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.974737] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255470, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.997400] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.997698] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.997698] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 705.998340] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.998340] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.998340] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.998813] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 705.998813] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.998813] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.998917] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.999081] env[61545]: DEBUG nova.virt.hardware [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.000315] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f69ec75-73df-4e1c-bc96-3c46762a888b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.003906] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Successfully created port: f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.011768] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85e69fc-9014-4f84-853a-05d3aeb46ef6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.104280] env[61545]: ERROR nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [req-a79ece8d-3784-4d2b-bf00-eedfff53f6f7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a79ece8d-3784-4d2b-bf00-eedfff53f6f7"}]} [ 706.124662] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 706.132163] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.132582] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.132582] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.133718] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.134188] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.134433] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.135863] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 706.135863] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.135863] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.136196] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.137527] env[61545]: DEBUG nova.virt.hardware [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.144935] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 706.146210] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 706.153547] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.153899] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-938c8460-94d1-47d9-b0dd-7b4e4af69e70 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.180436] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 706.180436] env[61545]: value = "task-4255471" [ 706.180436] env[61545]: _type = "Task" [ 706.180436] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.193710] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255471, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.201531] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 706.223284] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255469, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.225353] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 706.397106] env[61545]: DEBUG nova.compute.manager [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Received event network-changed-4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 706.397343] env[61545]: DEBUG nova.compute.manager [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Refreshing instance network info cache due to event network-changed-4e58a211-5e27-49a0-a9b2-0cb26978fd99. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 706.397564] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] Acquiring lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.397715] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] Acquired lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.397871] env[61545]: DEBUG nova.network.neutron [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Refreshing network info cache for port 4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.483055] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255470, 'name': CreateVM_Task, 'duration_secs': 0.402309} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.485775] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.486915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.486915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.490801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.491549] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b0bcb57-a0dc-40c9-8660-19d136f5a0a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.498983] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 706.498983] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5269ec51-3ed6-fad0-b914-9455021c4af8" [ 706.498983] env[61545]: _type = "Task" [ 706.498983] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.513582] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5269ec51-3ed6-fad0-b914-9455021c4af8, 'name': SearchDatastore_Task, 'duration_secs': 0.012297} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.513889] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.514159] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.514405] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.514549] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.514725] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.515011] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fe1c290-db82-457f-90da-f04f1db2d583 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.531187] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.531824] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.532210] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b45e827-3b00-4fbf-9a9f-cd7e863a8385 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.539164] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 706.539164] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52394899-031a-7309-1ecf-36f26fb8ea28" [ 706.539164] env[61545]: _type = "Task" [ 706.539164] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.556124] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52394899-031a-7309-1ecf-36f26fb8ea28, 'name': SearchDatastore_Task, 'duration_secs': 0.010184} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.556622] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-354b01dd-98a0-4f3f-8ea5-f1622406fb5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.568424] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 706.568424] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52087f0d-9b3b-fc05-5ee0-1904fac5c348" [ 706.568424] env[61545]: _type = "Task" [ 706.568424] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.579805] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52087f0d-9b3b-fc05-5ee0-1904fac5c348, 'name': SearchDatastore_Task, 'duration_secs': 0.011677} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.582892] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.583111] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.587466] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aabcbfff-0768-433b-b056-fd6dd0484b23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.591522] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 706.591522] env[61545]: value = "task-4255472" [ 706.591522] env[61545]: _type = "Task" [ 706.591522] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.600314] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.695268] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255471, 'name': ReconfigVM_Task, 'duration_secs': 0.208889} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.695629] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 706.696520] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a430574-65a4-4095-9c57-ea2b02e52c30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.720206] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.724026] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11fd80a8-7b63-426e-85eb-d4b5fd821d69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.750296] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255469, 'name': Rename_Task, 'duration_secs': 1.162345} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.751427] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.751771] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 706.751771] env[61545]: value = "task-4255473" [ 706.751771] env[61545]: _type = "Task" [ 706.751771] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.752088] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a1f0c0b-f8d5-4051-8652-c9cd26acb464 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.769494] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255473, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.771312] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 706.771312] env[61545]: value = "task-4255474" [ 706.771312] env[61545]: _type = "Task" [ 706.771312] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.784657] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab80d4b-c6ca-4102-8424-a3ab6ab93d31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.795937] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3164c381-e1b5-4b7a-a1d3-186f632c0e8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.833303] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada41aae-4a56-4eaf-a1c0-7d5cffeb9ea6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.847475] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4f42a9-d955-4be9-b879-e18ea84c4ead {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.852330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "8214216a-0256-467e-ac4c-1d14b0f73b77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.852494] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.853052] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.853052] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.853052] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.855858] env[61545]: INFO nova.compute.manager [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Terminating instance [ 706.872206] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.101812] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496097} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.101812] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.101934] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.102176] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc22650b-04cf-4e94-9bc6-85ab6ac09cdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.110446] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 707.110446] env[61545]: value = "task-4255475" [ 707.110446] env[61545]: _type = "Task" [ 707.110446] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.122893] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255475, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.264997] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255473, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.281956] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255474, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.382746] env[61545]: DEBUG nova.compute.manager [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 707.383088] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.386868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "2a0576f9-d740-4dfa-9783-17eb3987840b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.387028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.388152] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb2cbd8-c25c-47b2-853a-a3c8cb2ecd7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.402577] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 707.405026] env[61545]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b64bf2b-e9a7-47f7-ba6a-406d6e62beb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.413202] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 707.413202] env[61545]: value = "task-4255476" [ 707.413202] env[61545]: _type = "Task" [ 707.413202] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.420734] env[61545]: ERROR nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [req-931a40c3-2998-43aa-95da-0544a71e0ef0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-931a40c3-2998-43aa-95da-0544a71e0ef0"}]} [ 707.430776] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255476, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.442299] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 707.470492] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 707.470492] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.486082] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 707.511330] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 707.557422] env[61545]: DEBUG nova.network.neutron [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updated VIF entry in instance network info cache for port 4e58a211-5e27-49a0-a9b2-0cb26978fd99. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.558031] env[61545]: DEBUG nova.network.neutron [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updating instance_info_cache with network_info: [{"id": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "address": "fa:16:3e:66:ef:81", "network": {"id": "415f40c5-693f-4f21-a64d-29a5b40dd50d", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-878928979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82cfc8f4bc7d4a6da10e234b077f761f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e58a211-5e", "ovs_interfaceid": "4e58a211-5e27-49a0-a9b2-0cb26978fd99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.629337] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255475, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064211} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.639688] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.640283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c20238-4bf5-45b5-a147-bca416e4aa14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.666582] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.669639] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24e495a5-9db7-4c92-90bd-cab2b2b36299 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.692446] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 707.692446] env[61545]: value = "task-4255477" [ 707.692446] env[61545]: _type = "Task" [ 707.692446] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.704311] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255477, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.771060] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255473, 'name': ReconfigVM_Task, 'duration_secs': 0.719874} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.771488] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Reconfigured VM instance instance-00000006 to attach disk [datastore2] eced4107-b99e-479e-b22c-2157320ecf95/eced4107-b99e-479e-b22c-2157320ecf95.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.772657] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 707.785178] env[61545]: DEBUG oslo_vmware.api [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255474, 'name': PowerOnVM_Task, 'duration_secs': 0.651975} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.788404] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.788404] env[61545]: INFO nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Took 12.85 seconds to spawn the instance on the hypervisor. [ 707.788675] env[61545]: DEBUG nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.790222] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372fd689-addb-41aa-890d-b8cc53398065 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.925129] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255476, 'name': PowerOffVM_Task, 'duration_secs': 0.315924} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.929459] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.929577] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 707.930478] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b583758f-4e7c-490d-a78a-76beace3a2c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.999179] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 707.999405] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 707.999588] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Deleting the datastore file [datastore2] 8214216a-0256-467e-ac4c-1d14b0f73b77 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.999849] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46768dd3-6cfc-4a79-9897-47dae6a02581 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.008265] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for the task: (returnval){ [ 708.008265] env[61545]: value = "task-4255479" [ 708.008265] env[61545]: _type = "Task" [ 708.008265] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.023094] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.026610] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dca475-f50f-4eed-a4be-13a9ce14c121 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.034369] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f67795c-f65f-44db-8934-6e682ab80179 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.065027] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a147723-8689-4100-98e8-87bcf5c6680a req-c1ad20d7-c5dd-4673-a494-e40a2305850c service nova] Releasing lock "refresh_cache-b2579785-d1a4-48da-ba27-6ee3098578f1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.066274] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c9e01c-2635-4bbd-9fce-744ff6aaae09 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.074564] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11ee4be-7972-4c40-a38d-2bd83c426779 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.090691] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.207071] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255477, 'name': ReconfigVM_Task, 'duration_secs': 0.30476} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.207493] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d/5719daa8-a5bc-4604-b465-a57097695c6d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.208262] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c49da6a1-403b-4ec9-b1fd-3fbfa123b378 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.220158] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 708.220158] env[61545]: value = "task-4255480" [ 708.220158] env[61545]: _type = "Task" [ 708.220158] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.229409] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255480, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.282517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db2a446-79bc-4f8f-b97d-38ace0fc4c44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.315733] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f31f976-d588-44f4-a988-af1405f3c9d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.323300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.323551] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.324021] env[61545]: INFO nova.compute.manager [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Took 36.03 seconds to build instance. 
[ 708.345677] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 708.524505] env[61545]: DEBUG oslo_vmware.api [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Task: {'id': task-4255479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380302} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.524931] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.525074] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 708.525244] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.525486] env[61545]: INFO nova.compute.manager [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Took 1.14 seconds to destroy the instance on the hypervisor. [ 708.525827] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 708.526273] env[61545]: DEBUG nova.compute.manager [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 708.526408] env[61545]: DEBUG nova.network.neutron [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.547449] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Successfully updated port: f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.619650] env[61545]: ERROR nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [req-411888ca-18be-48e1-bfef-d9a695220302] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-411888ca-18be-48e1-bfef-d9a695220302"}]} [ 708.652140] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 708.678429] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 708.678795] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.694277] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 708.719076] env[61545]: DEBUG nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 708.741023] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255480, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.827105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-205948f4-05d5-4aa1-b47b-a14d08f677fa tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.692s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.945584] env[61545]: DEBUG nova.network.neutron [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Port d33a6a2d-6310-4263-adf4-dcf09ce72a6b binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 709.055958] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.055958] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.055958] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] 
[instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.235924] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255480, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.329764] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.408739] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e30bb9a-de6e-47d5-ad27-763e2b17cb1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.418166] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83919e1b-71dd-4f59-b494-f9cf288afccd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.458919] env[61545]: DEBUG nova.network.neutron [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.461177] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab0398b-886e-42ef-a0a2-4b1328b127e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.470350] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d63fb60-8e62-4c10-97f5-3111a8527afa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.489032] env[61545]: DEBUG nova.compute.provider_tree [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 709.617079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.617568] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.628888] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.733790] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255480, 'name': Rename_Task, 'duration_secs': 1.159537} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.734883] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.734883] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16dc60de-6aa6-469d-9732-fb7ac13d565a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.741550] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Waiting for the task: (returnval){ [ 709.741550] env[61545]: value = "task-4255481" [ 709.741550] env[61545]: _type = "Task" [ 709.741550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.754313] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.829120] env[61545]: DEBUG nova.network.neutron [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Updating instance_info_cache with network_info: [{"id": "f8265baf-2284-40a0-b20a-88199fb2bbda", "address": "fa:16:3e:fd:b1:d0", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8265baf-22", "ovs_interfaceid": "f8265baf-2284-40a0-b20a-88199fb2bbda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.838301] env[61545]: DEBUG nova.compute.manager [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Received event network-vif-plugged-f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 709.838553] env[61545]: DEBUG oslo_concurrency.lockutils [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] Acquiring lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.838756] env[61545]: DEBUG oslo_concurrency.lockutils [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.839700] env[61545]: DEBUG oslo_concurrency.lockutils [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.839700] env[61545]: DEBUG nova.compute.manager [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] No waiting events found dispatching network-vif-plugged-f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 709.839700] env[61545]: WARNING nova.compute.manager [req-6d361937-235c-45ad-b23e-e99fa3135618 req-01f5f5f8-dbfc-431a-9530-99cd23497f9a service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Received unexpected event network-vif-plugged-f8265baf-2284-40a0-b20a-88199fb2bbda for instance with vm_state building and task_state spawning. [ 709.853740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.965389] env[61545]: INFO nova.compute.manager [-] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Took 1.44 seconds to deallocate network for instance. [ 710.003928] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.004345] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.004630] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.013223] env[61545]: ERROR nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [req-42419163-e61f-40d8-9f6c-cebcb40617b5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42419163-e61f-40d8-9f6c-cebcb40617b5"}]} [ 710.013823] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.088s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.018186] env[61545]: ERROR nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Failed to build and run instance: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e (generation 39): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42419163-e61f-40d8-9f6c-cebcb40617b5"}]} [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Traceback (most recent call last): [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] with self.rt.instance_claim(context, instance, node, allocs, [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] return f(*args, **kwargs) [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 215, in instance_claim [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] self._update(elevated, cn) [ 710.018186] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] self._update_to_placement(context, compute_node, startup) [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 710.018612] env[61545]: ERROR nova.compute.manager 
[instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] raise attempt.get() [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] raise value [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 710.018612] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] self.reportclient.update_from_provider_tree( [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] self.set_inventory_for_provider( [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] raise exception.ResourceProviderUpdateConflict( [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e (generation 39): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42419163-e61f-40d8-9f6c-cebcb40617b5"}]} [ 710.019027] env[61545]: ERROR nova.compute.manager [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] [ 710.019027] env[61545]: DEBUG nova.compute.utils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] A conflict was encountered attempting to update resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e (generation 39): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource pro {{(pid=61545) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 710.020008] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 
tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.828s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.020241] env[61545]: DEBUG nova.objects.instance [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lazy-loading 'resources' on Instance uuid 579fb20b-083f-4227-9a13-c0f1ea36e272 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 710.027664] env[61545]: DEBUG nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Build of instance 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d was re-scheduled: A conflict was encountered attempting to update resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e (generation 39): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42419163-e61f-40d8-9f6c-cebcb40617b5"}]} {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 710.028088] env[61545]: DEBUG nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Unplugging VIFs for instance {{(pid=61545) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 710.028330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Acquiring lock "refresh_cache-1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.028501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Acquired lock "refresh_cache-1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.028667] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.252920] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255481, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.316592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.316950] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.332907] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.333310] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance network_info: |[{"id": "f8265baf-2284-40a0-b20a-88199fb2bbda", "address": "fa:16:3e:fd:b1:d0", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8265baf-22", "ovs_interfaceid": "f8265baf-2284-40a0-b20a-88199fb2bbda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 710.334090] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:b1:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8265baf-2284-40a0-b20a-88199fb2bbda', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
710.345526] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating folder: Project (9007a6e389c0467c8e2077309984eaab). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.346132] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60d0f5dd-cbea-42d8-a332-6d0db6255233 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.360041] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created folder: Project (9007a6e389c0467c8e2077309984eaab) in parent group-v838542. [ 710.360858] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating folder: Instances. Parent ref: group-v838597. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.361176] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98590e54-2941-4585-8947-c176a52aa16e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.371855] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created folder: Instances in parent group-v838597. [ 710.372196] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.372482] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.372750] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d316e527-cfee-4c43-9ec3-00b203984ac7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.396864] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.396864] env[61545]: value = "task-4255484" [ 710.396864] env[61545]: _type = "Task" [ 710.396864] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.405862] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255484, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.490673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.542235] env[61545]: DEBUG nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 710.559887] env[61545]: DEBUG nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 710.560164] env[61545]: DEBUG nova.compute.provider_tree [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.575146] env[61545]: DEBUG nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 710.597448] env[61545]: DEBUG nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 710.600590] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 
tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.759517] env[61545]: DEBUG oslo_vmware.api [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Task: {'id': task-4255481, 'name': PowerOnVM_Task, 'duration_secs': 0.527652} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.760267] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.760519] env[61545]: DEBUG nova.compute.manager [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.762053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcbb418-98dd-4796-a180-06cb3b85b738 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.844918] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.907675] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255484, 'name': CreateVM_Task, 'duration_secs': 0.508463} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.907842] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.908690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.909083] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.909221] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.912258] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ff6bbc0-7312-4085-bdd5-b13505782e1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.918327] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 710.918327] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8130-8824-ef12-bbdc-2d3f4b75b998" [ 710.918327] env[61545]: _type = "Task" [ 710.918327] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.927109] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8130-8824-ef12-bbdc-2d3f4b75b998, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.079160] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.079160] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.079160] env[61545]: DEBUG nova.network.neutron [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.129833] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb407a1-bc4c-443b-8dc7-b16ea91b97ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.138147] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bf6fbb-db24-460f-8720-8949cf78add6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.171944] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29845a69-2423-48cc-9590-782c0048d4f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.180034] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f93bf1-f078-4355-9242-8260471e3ed0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.195476] env[61545]: DEBUG nova.compute.provider_tree [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.289082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.345495] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Releasing lock "refresh_cache-1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.345817] env[61545]: DEBUG nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61545) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 711.346017] env[61545]: DEBUG nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 711.346191] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.430342] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8130-8824-ef12-bbdc-2d3f4b75b998, 'name': SearchDatastore_Task, 'duration_secs': 0.014407} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.431225] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.432561] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.432782] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.433022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.433168] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.433437] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.433586] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-312de557-de21-4835-9954-efba23c62491 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.445446] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.445689] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.447706] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2d7ab63-37d7-4c3a-a6a9-bb18b5884296 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.457779] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 711.457779] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52319887-1054-f19c-9eaf-bff6d2f6a970" [ 711.457779] env[61545]: _type = "Task" [ 711.457779] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.468939] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52319887-1054-f19c-9eaf-bff6d2f6a970, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.736411] env[61545]: DEBUG nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 41 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 711.736732] env[61545]: DEBUG nova.compute.provider_tree [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 41 to 42 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 711.736980] env[61545]: DEBUG nova.compute.provider_tree [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.938535] env[61545]: DEBUG nova.network.neutron [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] 
Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.971930] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52319887-1054-f19c-9eaf-bff6d2f6a970, 'name': SearchDatastore_Task, 'duration_secs': 0.013081} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.972835] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2375ae79-4ae3-4d69-ba13-6624a0e95f32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.981354] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 711.981354] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297207e-3881-17af-88a2-972a4c7bbfbc" [ 711.981354] env[61545]: _type = "Task" [ 711.981354] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.993695] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297207e-3881-17af-88a2-972a4c7bbfbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.012351] env[61545]: DEBUG nova.network.neutron [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.244709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.246996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.583s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.248717] env[61545]: INFO nova.compute.claims [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.283716] env[61545]: INFO nova.scheduler.client.report [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Deleted allocations for instance 579fb20b-083f-4227-9a13-c0f1ea36e272 [ 712.442317] env[61545]: INFO nova.compute.manager [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] [instance: 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d] Took 1.10 seconds to deallocate network for instance. [ 712.495776] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297207e-3881-17af-88a2-972a4c7bbfbc, 'name': SearchDatastore_Task, 'duration_secs': 0.028476} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.497468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.497802] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 26a6b40e-f8a4-4cc6-bdbb-586ca592901c/26a6b40e-f8a4-4cc6-bdbb-586ca592901c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 712.498886] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcba8fde-ea2f-49ad-a124-2cc2c8a4cfdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.509850] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 712.509850] env[61545]: value = "task-4255485" [ 712.509850] env[61545]: _type = "Task" [ 712.509850] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.520043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.524331] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255485, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.741253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "5719daa8-a5bc-4604-b465-a57097695c6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.741581] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.741905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "5719daa8-a5bc-4604-b465-a57097695c6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.742196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.742473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.749383] env[61545]: INFO nova.compute.manager [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Terminating instance [ 712.751374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.751667] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.762248] env[61545]: DEBUG nova.compute.manager [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Received event network-changed-f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 712.762482] env[61545]: DEBUG nova.compute.manager [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Refreshing instance network info cache due to event network-changed-f8265baf-2284-40a0-b20a-88199fb2bbda. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 712.764020] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Acquiring lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.764020] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Acquired lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.764020] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Refreshing network info cache for port f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.793211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b78a044e-4ecd-4439-b8b0-46a66da3dc33 tempest-ServerDiagnosticsV248Test-1614193946 tempest-ServerDiagnosticsV248Test-1614193946-project-member] Lock "579fb20b-083f-4227-9a13-c0f1ea36e272" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.933s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.021254] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255485, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.065740] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c54241-5aa0-48d3-8e1f-0601388d20cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.088390] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c231bc8-d383-42b3-a1cc-15bd3a006eb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.096619] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 713.254754] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "refresh_cache-5719daa8-a5bc-4604-b465-a57097695c6d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.255662] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquired lock "refresh_cache-5719daa8-a5bc-4604-b465-a57097695c6d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.255662] env[61545]: DEBUG nova.network.neutron [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.495664] env[61545]: INFO nova.scheduler.client.report [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Deleted allocations for instance 1bf6b40d-8b9d-4e3e-8dda-9170d88e002d [ 713.529663] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532272} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.530546] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 26a6b40e-f8a4-4cc6-bdbb-586ca592901c/26a6b40e-f8a4-4cc6-bdbb-586ca592901c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 713.531376] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.531376] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c1e743c-b0f3-4263-8494-faccf2ca1310 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.542891] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 713.542891] env[61545]: value = "task-4255486" [ 713.542891] env[61545]: _type = "Task" [ 713.542891] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.558118] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.603609] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.604342] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8cc6ebd-bd50-4129-a429-0bc0dc5315be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.617291] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 713.617291] env[61545]: value = "task-4255487" [ 713.617291] env[61545]: _type = "Task" [ 713.617291] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.626797] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255487, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.818527] env[61545]: DEBUG nova.network.neutron [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.840574] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d71b984-926b-421c-bdd4-6f76726d02be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.860480] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09f85be-c5e6-4788-b8af-c74e782b0abc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.895828] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14526add-ff39-4ca2-a8be-18d0f5cb5ce4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.905579] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f847b9c4-c6d6-46f1-a7fc-26e4dea2a59f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.912606] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.912849] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.927583] env[61545]: DEBUG nova.compute.provider_tree [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 714.006402] env[61545]: DEBUG nova.network.neutron [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Updating instance_info_cache with network_info: [] 
{{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.007974] env[61545]: DEBUG oslo_concurrency.lockutils [None req-07a99e98-d50a-495a-8beb-be9314a6ae31 tempest-FloatingIPsAssociationNegativeTestJSON-886229629 tempest-FloatingIPsAssociationNegativeTestJSON-886229629-project-member] Lock "1bf6b40d-8b9d-4e3e-8dda-9170d88e002d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.504s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.053853] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183591} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.054162] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 714.055503] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb4ea92-71d4-4e3f-bb43-954ee6e71a79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.081040] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 26a6b40e-f8a4-4cc6-bdbb-586ca592901c/26a6b40e-f8a4-4cc6-bdbb-586ca592901c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 714.081375] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86a04ccc-fa5b-4fb1-8896-4ca1b458818a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.097018] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Updated VIF entry in instance network info cache for port f8265baf-2284-40a0-b20a-88199fb2bbda. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.097392] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Updating instance_info_cache with network_info: [{"id": "f8265baf-2284-40a0-b20a-88199fb2bbda", "address": "fa:16:3e:fd:b1:d0", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8265baf-22", "ovs_interfaceid": "f8265baf-2284-40a0-b20a-88199fb2bbda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.106529] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 714.106529] env[61545]: value = "task-4255488" [ 714.106529] env[61545]: _type = "Task" [ 714.106529] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.116840] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255488, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.128826] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255487, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.457053] env[61545]: ERROR nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [req-e99f0480-c58d-4794-a579-6ebd75744ec0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e99f0480-c58d-4794-a579-6ebd75744ec0"}]} [ 714.486374] env[61545]: DEBUG nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 714.505999] env[61545]: DEBUG nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 714.506191] env[61545]: DEBUG nova.compute.provider_tree [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 714.509552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Releasing lock "refresh_cache-5719daa8-a5bc-4604-b465-a57097695c6d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.510315] env[61545]: DEBUG nova.compute.manager [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 714.510800] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.513321] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2c6237-1e51-44ba-b75c-b67c163efb3c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.518045] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.527641] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.527641] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76e94d92-cbf8-4c7c-9466-47c2540ee77c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.532067] env[61545]: DEBUG nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 714.542698] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 714.542698] env[61545]: value = "task-4255489" [ 714.542698] env[61545]: _type = "Task" [ 714.542698] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.552978] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255489, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.565602] env[61545]: DEBUG nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 714.601292] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Releasing lock "refresh_cache-26a6b40e-f8a4-4cc6-bdbb-586ca592901c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.601292] env[61545]: DEBUG nova.compute.manager [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Received event network-vif-deleted-f3a5e417-6468-4590-aa5e-e24c16364727 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 714.601292] env[61545]: DEBUG nova.compute.manager [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Received event network-changed-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 714.601292] env[61545]: DEBUG nova.compute.manager [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Refreshing instance network info cache due to event network-changed-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 714.601292] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Acquiring lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.601471] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Acquired lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.601471] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Refreshing network info cache for port 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.619289] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255488, 'name': ReconfigVM_Task, 'duration_secs': 0.451876} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.626473] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 26a6b40e-f8a4-4cc6-bdbb-586ca592901c/26a6b40e-f8a4-4cc6-bdbb-586ca592901c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.627657] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04a91805-1880-4a6e-bce4-b32b32007056 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.636506] env[61545]: DEBUG oslo_vmware.api [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255487, 'name': PowerOnVM_Task, 'duration_secs': 0.773684} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.637877] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 714.638094] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7358c2-605b-48f5-a72f-715c998f1e5b tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance 'eced4107-b99e-479e-b22c-2157320ecf95' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 714.643353] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 714.643353] env[61545]: value = "task-4255490" [ 714.643353] env[61545]: _type = "Task" [ 714.643353] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.656937] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255490, 'name': Rename_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.995530] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.995703] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.995857] env[61545]: DEBUG nova.compute.manager [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 714.998672] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5e5e78-58bd-4f7f-b286-a3b5033dad22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.008859] env[61545]: DEBUG nova.compute.manager [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 715.009492] env[61545]: DEBUG nova.objects.instance [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'flavor' on Instance uuid 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 715.051467] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.055197] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255489, 'name': PowerOffVM_Task, 'duration_secs': 0.262094} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.055496] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 715.055641] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.056887] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6373a14-3b65-4b1d-a4c2-0933ab27bf70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.090107] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.090988] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.090988] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Deleting the datastore file [datastore2] 5719daa8-a5bc-4604-b465-a57097695c6d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.091506] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b04f4cb-1f52-4970-8ebb-c0eb04a0b71d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.098936] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for the task: (returnval){ [ 715.098936] env[61545]: value = "task-4255492" [ 715.098936] env[61545]: _type = "Task" [ 715.098936] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.111466] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255492, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.128472] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750c7181-3396-4318-8871-5332cabce1dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.137351] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d112726-31c2-4eae-a3ce-144d5e33005f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.180982] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df548c6-d13f-4e4c-8081-788fb6fc8b39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.195382] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ff4571-4a99-4b09-85fe-1809c3a78105 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.200070] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255490, 'name': Rename_Task, 'duration_secs': 0.250435} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.202999] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.205338] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6a1aed0-2f23-4c90-8945-d068c42ceae7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.220310] env[61545]: DEBUG nova.compute.provider_tree [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 715.229609] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 715.229609] env[61545]: value = "task-4255493" [ 715.229609] env[61545]: _type = "Task" [ 715.229609] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.240530] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.612391] env[61545]: DEBUG oslo_vmware.api [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Task: {'id': task-4255492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1017} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.612391] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.612391] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.612391] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.612391] env[61545]: INFO nova.compute.manager [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 715.613017] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.613017] env[61545]: DEBUG nova.compute.manager [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 715.613017] env[61545]: DEBUG nova.network.neutron [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.672418] env[61545]: DEBUG nova.network.neutron [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.740609] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255493, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.778535] env[61545]: DEBUG nova.scheduler.client.report [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 44 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 715.778794] env[61545]: DEBUG nova.compute.provider_tree [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 44 to 45 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 715.778986] env[61545]: DEBUG nova.compute.provider_tree [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 715.972091] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updated VIF entry in instance network info cache for port 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 715.972091] env[61545]: DEBUG nova.network.neutron [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updating instance_info_cache with network_info: [{"id": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "address": "fa:16:3e:c7:9e:5f", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f2ef34-f0", "ovs_interfaceid": "53f2ef34-f0c8-46d5-86d1-b21ecc745ad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.023717] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 716.023717] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5d72a44-7153-4b25-9641-6a9fe9e64732 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.032281] env[61545]: DEBUG oslo_vmware.api [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 716.032281] env[61545]: value = "task-4255494" [ 716.032281] env[61545]: _type = "Task" [ 716.032281] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.050375] env[61545]: DEBUG oslo_vmware.api [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255494, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.175816] env[61545]: DEBUG nova.network.neutron [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.241836] env[61545]: DEBUG oslo_vmware.api [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255493, 'name': PowerOnVM_Task, 'duration_secs': 0.644939} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.242217] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.242507] env[61545]: INFO nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Took 10.28 seconds to spawn the instance on the hypervisor. [ 716.242696] env[61545]: DEBUG nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.243552] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f108d8-3139-44fb-a987-2de5d2cae15d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.285600] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.038s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.286682] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 716.289414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.060s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.292027] env[61545]: INFO nova.compute.claims [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.474892] env[61545]: DEBUG oslo_concurrency.lockutils [req-5eb77685-374f-4d85-9cfc-2d1149da1294 req-90d737b8-6304-47a0-adf5-c7e44cf62946 service nova] Releasing lock "refresh_cache-e8c954ec-de76-4d3e-9a63-6c30523d5b63" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.552603] env[61545]: DEBUG oslo_vmware.api [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255494, 'name': PowerOffVM_Task, 'duration_secs': 0.429991} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.552603] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.552603] env[61545]: DEBUG nova.compute.manager [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.554227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e97f65-6603-4eca-ae2e-88a8a23a3327 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.683347] env[61545]: INFO nova.compute.manager [-] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Took 1.07 seconds to deallocate network for instance. [ 716.770312] env[61545]: INFO nova.compute.manager [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Took 37.12 seconds to build instance. 
[ 716.800076] env[61545]: DEBUG nova.compute.utils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 716.805598] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 716.806344] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.929305] env[61545]: DEBUG nova.policy [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87ecc6c8dd334b988b6ca501152d2829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '904a34121aff449db58eaa92ccfbe556', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 717.068620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43c0b6ea-938a-450a-bbd4-5cd6d7675ad4 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.073s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.191924] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.275677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-26ece35d-f9eb-42dc-95fe-a7c4da4e92cf tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.645s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.310654] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.510246] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Successfully created port: a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.779856] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 717.866116] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7edb79-1144-43d0-90d5-d3073f507e2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.877943] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebba5fa-d8d6-4001-9ea7-91ce1efa3e64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.926210] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf3fb4a-fa98-4ef8-b961-ecbd7da9fda7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.936672] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710b63d6-3b43-409a-84ab-2c1765a9b41b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.954079] env[61545]: DEBUG nova.compute.provider_tree [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.314871] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.328240] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.376605] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.376915] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.382092] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.382092] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.382092] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.382092] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.382092] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.382506] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 718.382506] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.382506] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.382506] env[61545]: DEBUG nova.virt.hardware [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.382506] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2de76d-1d30-4fa1-9d7b-629fbc86eca1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.390097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.390466] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.396687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f659b944-5ef6-4a6f-b6a0-26af052daf30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.459887] env[61545]: DEBUG nova.scheduler.client.report [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.967172] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 
tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.967785] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.971417] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.676s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.973156] env[61545]: INFO nova.compute.claims [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.391059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.391427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.392451] env[61545]: DEBUG nova.compute.manager [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Going to confirm migration 1 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 719.481201] env[61545]: DEBUG nova.compute.utils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 719.486233] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 719.486575] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.525841] env[61545]: DEBUG nova.objects.instance [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'flavor' on Instance uuid 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.546961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.547129] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.607185] env[61545]: DEBUG nova.policy [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f46ceb43ab3d40edbb17fdd7e8f6bd35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e854a184de8e4b02aa3594b81c6d99f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 719.671322] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Successfully updated port: a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.714887] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.715285] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" 
acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.715542] env[61545]: DEBUG nova.compute.manager [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.719021] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df23c4e7-6e54-44b9-a1a1-322120ea7cb8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.726740] env[61545]: DEBUG nova.compute.manager [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 719.727621] env[61545]: DEBUG nova.objects.instance [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lazy-loading 'flavor' on Instance uuid 26a6b40e-f8a4-4cc6-bdbb-586ca592901c {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.988378] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 720.006764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.006764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.006764] env[61545]: DEBUG nova.network.neutron [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.006764] env[61545]: DEBUG nova.objects.instance [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lazy-loading 'info_cache' on Instance uuid eced4107-b99e-479e-b22c-2157320ecf95 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.036746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.036930] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.037505] env[61545]: DEBUG nova.network.neutron [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.037505] env[61545]: DEBUG nova.objects.instance [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'info_cache' on Instance uuid 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.136877] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Successfully created port: 44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.174805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 
tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.174925] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.175115] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.436179] env[61545]: DEBUG nova.compute.manager [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Received event network-vif-plugged-a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 720.436179] env[61545]: DEBUG oslo_concurrency.lockutils [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] Acquiring lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.436179] env[61545]: DEBUG oslo_concurrency.lockutils [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.436179] env[61545]: DEBUG oslo_concurrency.lockutils [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.436179] env[61545]: DEBUG nova.compute.manager [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] No waiting events found dispatching network-vif-plugged-a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 720.436690] env[61545]: WARNING nova.compute.manager [req-02e4b813-5c9f-486b-97fe-2d7d7c55988e req-08c8fd55-cb65-4353-8224-9acf50983bbb service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Received unexpected event network-vif-plugged-a9fecc2f-82bc-401e-b2ac-6d840b6e25be for instance with vm_state building and task_state spawning. 
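The PowerOffVM_Task, CreateVM_Task and SearchDatastore_Task entries that recur through this section follow oslo.vmware's call-and-poll pattern: invoke the vSphere method to obtain a task reference, then poll it until the "completed successfully" state is reached. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an externally obtained vm_ref; this is not Nova's code and none of the values below come from this log:

    from oslo_vmware import api as vmware_api

    def power_off(session, vm_ref):
        # invoke_api() issues the SOAP request and returns a task reference;
        # wait_for_task() polls it (logging progress) until success or error.
        task_ref = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        return session.wait_for_task(task_ref)

    # Placeholder session setup; a real vCenter host and credentials are required.
    session = vmware_api.VMwareAPISession(
        "vc.example.test", "administrator", "secret",
        api_retry_count=10, task_poll_interval=0.5)

The task_poll_interval governs how often the "progress is 0%" style poll lines appear before a task is reported as completed.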
[ 720.541688] env[61545]: DEBUG nova.objects.base [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Object Instance<844f01ed-4dae-4e13-9d1c-09a73f413201> lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 720.656117] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfc9e1b-a3b3-4950-b4d5-c239201812f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.665543] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9d6275-a168-4146-94e0-76a118afe04a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.700461] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621f6958-689f-4aaf-b906-84f2540d8832 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.710593] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b8e59d-b09c-4967-98f5-46710d286e4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.727192] env[61545]: DEBUG nova.compute.provider_tree [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.742318] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.742434] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-349fe01f-8e80-4fca-ba17-44e949a51ee2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.746548] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.751838] env[61545]: DEBUG oslo_vmware.api [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 720.751838] env[61545]: value = "task-4255495" [ 720.751838] env[61545]: _type = "Task" [ 720.751838] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.764810] env[61545]: DEBUG oslo_vmware.api [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255495, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.970362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "e21de424-8121-4e2f-84c2-8096ba8048cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.970620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.983296] env[61545]: DEBUG nova.network.neutron [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Updating instance_info_cache with network_info: [{"id": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "address": "fa:16:3e:77:d3:3d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fecc2f-82", "ovs_interfaceid": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.009858] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 721.035420] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 721.035765] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.035911] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 721.036036] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.036200] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 721.036516] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 721.036776] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 721.036940] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 721.037126] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 721.037289] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 721.037493] env[61545]: DEBUG nova.virt.hardware [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 721.039076] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b08297-35fa-4b48-8c46-29a449122dcf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.050660] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae64127-cd94-4765-9906-baa3827a4dde {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.230977] env[61545]: DEBUG nova.scheduler.client.report [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.267969] env[61545]: DEBUG oslo_vmware.api [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255495, 'name': PowerOffVM_Task, 'duration_secs': 0.293718} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.267969] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 721.267969] env[61545]: DEBUG nova.compute.manager [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 721.267969] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840c1c6e-4f19-497f-b83f-4692246baadd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.486346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.487436] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Instance network_info: |[{"id": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "address": "fa:16:3e:77:d3:3d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fecc2f-82", "ovs_interfaceid": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 721.487578] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:d3:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9fecc2f-82bc-401e-b2ac-6d840b6e25be', 'vif_model': 'vmxnet3'}] {{(pid=61545) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 721.499471] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.500399] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 721.502751] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f6191b2-aded-4505-91b3-1545bae62552 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.528609] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.528609] env[61545]: value = "task-4255496" [ 721.528609] env[61545]: _type = "Task" [ 721.528609] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.540786] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255496, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.737889] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.738468] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 721.742628] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.289s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.742778] env[61545]: INFO nova.compute.claims [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.787507] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f64ae0bb-272d-42d3-8660-b38daa3ff64b tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.886586] env[61545]: DEBUG nova.network.neutron [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [{"id": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "address": "fa:16:3e:1f:b2:06", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd33a6a2d-63", "ovs_interfaceid": "d33a6a2d-6310-4263-adf4-dcf09ce72a6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.944927] env[61545]: DEBUG nova.network.neutron [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.041769] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255496, 'name': CreateVM_Task, 'duration_secs': 0.350062} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.041963] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.042704] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.042862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.043226] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 722.043561] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87f36106-adb5-4f65-8290-c7107dbe2bd9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.049301] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 722.049301] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cfb8-90e4-898d-a7ab-26a3d8a0ea74" [ 722.049301] env[61545]: _type = "Task" [ 722.049301] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.058089] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cfb8-90e4-898d-a7ab-26a3d8a0ea74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.248971] env[61545]: DEBUG nova.compute.utils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.256973] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 722.257363] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.328881] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Successfully updated port: 44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 722.382188] env[61545]: DEBUG nova.policy [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77063880c8814b2cab369e1b582ca410', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f91bd66fdcb4aaaa5fe77aa66959491', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 722.394330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-eced4107-b99e-479e-b22c-2157320ecf95" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.394460] env[61545]: DEBUG nova.objects.instance [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lazy-loading 'migration_context' on Instance uuid eced4107-b99e-479e-b22c-2157320ecf95 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 722.449195] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.561406] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5293cfb8-90e4-898d-a7ab-26a3d8a0ea74, 'name': SearchDatastore_Task, 'duration_secs': 0.01137} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.561722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.561952] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 722.562201] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.562346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.562522] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 722.562794] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2627fa71-e781-4db3-9ffb-4721fd26fb06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.572233] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 722.572392] env[61545]: DEBUG 
nova.virt.vmwareapi.vmops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 722.573169] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5492f8b7-34b0-4564-9122-d61e9645732d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.580864] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 722.580864] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52830192-6438-838d-bd86-fe4efd475d63" [ 722.580864] env[61545]: _type = "Task" [ 722.580864] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.592203] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52830192-6438-838d-bd86-fe4efd475d63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.758147] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 722.834241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.834241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.834241] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.897170] env[61545]: DEBUG nova.objects.base [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 722.898150] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b88b65b-e73a-42f2-afed-08dfd1e68fde {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.933090] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f32fad56-a4b9-4cce-9ad8-22893d813175 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.941257] env[61545]: DEBUG oslo_vmware.api [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 722.941257] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523b65f3-2e82-56d6-8ce8-2389ce008f28" [ 722.941257] env[61545]: _type = "Task" [ 722.941257] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.956989] env[61545]: DEBUG oslo_vmware.api [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523b65f3-2e82-56d6-8ce8-2389ce008f28, 'name': SearchDatastore_Task, 'duration_secs': 0.008518} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.958116] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.005773] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Successfully created port: 041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.093380] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52830192-6438-838d-bd86-fe4efd475d63, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.098238] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f3ab2c6-e1cb-445e-8a83-852fd80fc6b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.106047] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 723.106047] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528619d9-e3f0-a22a-745f-7e34a6bd1e8c" [ 723.106047] env[61545]: _type = "Task" [ 723.106047] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.118820] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528619d9-e3f0-a22a-745f-7e34a6bd1e8c, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.122354] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.122354] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 43cf4c96-2c8b-4520-8926-c1be5a87734e/43cf4c96-2c8b-4520-8926-c1be5a87734e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.122354] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0355407d-616d-4567-944f-9b8dc9a11f87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.128288] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 723.128288] env[61545]: value = "task-4255497" [ 723.128288] env[61545]: _type = "Task" [ 723.128288] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.137477] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255497, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.345810] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c925d93-94ca-4a80-9b51-9ba99f60c1ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.359019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2a5f75-fe56-4dff-94ec-8d8b9486584e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.403126] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f42eabd-8440-4ab7-b4c7-529e894e4c0d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.411249] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.419283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab4fcd5-04c6-4df6-83e2-b2dbba78da76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.441478] env[61545]: DEBUG nova.compute.provider_tree [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.458395] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.458557] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5bd166e-dff6-4358-a0d7-5072cc100d93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.469086] env[61545]: DEBUG oslo_vmware.api [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 723.469086] env[61545]: value = "task-4255498" [ 723.469086] env[61545]: _type = "Task" [ 723.469086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.475050] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Received event network-changed-a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 723.475272] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Refreshing instance network info cache due to event network-changed-a9fecc2f-82bc-401e-b2ac-6d840b6e25be. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 723.475544] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Acquiring lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.475703] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Acquired lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.475863] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Refreshing network info cache for port a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.483850] env[61545]: DEBUG oslo_vmware.api [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255498, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.643967] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255497, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.707185] env[61545]: DEBUG nova.network.neutron [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Updating instance_info_cache with network_info: [{"id": "44c7a5f7-291c-4f82-ab36-b2612b504432", "address": "fa:16:3e:9d:76:43", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c7a5f7-29", "ovs_interfaceid": "44c7a5f7-291c-4f82-ab36-b2612b504432", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.776665] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 
tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 723.800900] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 723.801234] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 723.801408] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 723.801601] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 723.801746] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 723.801891] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 723.802120] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 723.802282] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 723.802449] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 723.802613] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 723.802784] env[61545]: DEBUG nova.virt.hardware [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 723.803738] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50ba84c-83af-4225-b928-8a5753c869c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.813146] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67f2f13-8f60-4b7c-a48a-9bcd939a504c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.948894] env[61545]: DEBUG nova.scheduler.client.report [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.980955] env[61545]: DEBUG oslo_vmware.api [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255498, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.144187] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517809} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.144187] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 43cf4c96-2c8b-4520-8926-c1be5a87734e/43cf4c96-2c8b-4520-8926-c1be5a87734e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.144187] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.145049] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3053f245-1be2-49ad-adbf-689df132dcf9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.154496] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 724.154496] env[61545]: value = "task-4255499" [ 724.154496] env[61545]: _type = "Task" [ 724.154496] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.165340] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255499, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.177886] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.178510] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.210982] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.210982] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Instance network_info: |[{"id": "44c7a5f7-291c-4f82-ab36-b2612b504432", "address": "fa:16:3e:9d:76:43", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c7a5f7-29", "ovs_interfaceid": "44c7a5f7-291c-4f82-ab36-b2612b504432", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 724.211146] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:76:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44c7a5f7-291c-4f82-ab36-b2612b504432', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.222552] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.224297] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.227436] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-170d1a43-34a7-4b9f-8175-a8d0bd640393 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.248869] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.248869] env[61545]: value = "task-4255500" [ 724.248869] env[61545]: _type = "Task" [ 724.248869] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.261656] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255500, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.387266] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Updated VIF entry in instance network info cache for port a9fecc2f-82bc-401e-b2ac-6d840b6e25be. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 724.387864] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Updating instance_info_cache with network_info: [{"id": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "address": "fa:16:3e:77:d3:3d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fecc2f-82", "ovs_interfaceid": "a9fecc2f-82bc-401e-b2ac-6d840b6e25be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.454766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.455321] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 
tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.459224] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.393s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.459224] env[61545]: DEBUG nova.objects.instance [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 724.482423] env[61545]: DEBUG oslo_vmware.api [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255498, 'name': PowerOnVM_Task, 'duration_secs': 0.60586} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.483246] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.483863] env[61545]: DEBUG nova.compute.manager [None req-de7400e0-77b0-4829-ac84-573ed48e7768 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.484845] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1e8d9d-44dd-42f6-9baa-44f8be39e982 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.534048] env[61545]: DEBUG nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.534999] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc04cf8-9aa5-4a69-a509-843409366c8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.675431] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075532} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.680910] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.681914] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c113a25-6412-4af1-8845-b29558276d15 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.693250] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.694871] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.695942] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.708355] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.717915] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 43cf4c96-2c8b-4520-8926-c1be5a87734e/43cf4c96-2c8b-4520-8926-c1be5a87734e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.718526] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.718615] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9df59e3-bc40-4a3e-9a24-0eb674caf4bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.733806] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.733806] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 724.734367] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 724.745315] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 724.745315] env[61545]: value = "task-4255501" [ 724.745315] env[61545]: _type = "Task" [ 724.745315] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.764947] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255501, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.767579] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255500, 'name': CreateVM_Task, 'duration_secs': 0.356776} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.767921] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.768648] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.768818] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.769149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.769421] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3e170e6-4c81-4098-ad68-062793b9b95f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.778086] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 724.778086] env[61545]: value = 
"session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c675a-5ab4-d58e-65c4-b35c91ba2882" [ 724.778086] env[61545]: _type = "Task" [ 724.778086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.788682] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c675a-5ab4-d58e-65c4-b35c91ba2882, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.891022] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Releasing lock "refresh_cache-43cf4c96-2c8b-4520-8926-c1be5a87734e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.891326] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Received event network-vif-plugged-44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 724.891551] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Acquiring lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.891773] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.891934] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.892292] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] No waiting events found dispatching network-vif-plugged-44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 724.892376] env[61545]: WARNING nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Received unexpected event network-vif-plugged-44c7a5f7-291c-4f82-ab36-b2612b504432 for instance with vm_state building and task_state spawning. 
[ 724.893044] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Received event network-changed-44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 724.893044] env[61545]: DEBUG nova.compute.manager [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Refreshing instance network info cache due to event network-changed-44c7a5f7-291c-4f82-ab36-b2612b504432. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 724.893196] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Acquiring lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.893343] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Acquired lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.893543] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Refreshing network info cache for port 44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.963598] env[61545]: DEBUG nova.compute.utils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 724.968424] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 724.968424] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.049137] env[61545]: INFO nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] instance snapshotting [ 725.049402] env[61545]: WARNING nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 725.052291] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a842216-107d-4e09-91b0-bac1116f6466 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.074091] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd314a02-5bc6-4fda-bc57-65a262f8905f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.097278] env[61545]: DEBUG nova.policy [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f0e07a33f144aa3be9d0fc99096eefe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf5ed987e5404b629b6014e2b74d69d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.196289] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Successfully updated port: 041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.242591] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.257368] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255501, 'name': ReconfigVM_Task, 'duration_secs': 0.329105} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.257696] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 43cf4c96-2c8b-4520-8926-c1be5a87734e/43cf4c96-2c8b-4520-8926-c1be5a87734e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.258589] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cf79309-6b28-4868-96f5-a1333db00c94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.267392] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 725.267392] env[61545]: value = "task-4255502" [ 725.267392] env[61545]: _type = "Task" [ 725.267392] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.278036] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255502, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.288704] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c675a-5ab4-d58e-65c4-b35c91ba2882, 'name': SearchDatastore_Task, 'duration_secs': 0.010925} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.288875] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.289204] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.289531] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.289742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.290010] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.290331] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f99f66e1-149a-425a-9f85-b4e1a1a0da41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.299471] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.299679] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.300505] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab51c160-7953-47be-9b0c-e5f5295e3116 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.306961] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 725.306961] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d8952-6db4-4bef-9edb-0c2f6403a776" [ 725.306961] env[61545]: _type = "Task" [ 725.306961] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.318106] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d8952-6db4-4bef-9edb-0c2f6403a776, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.474942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5c89a273-67e6-45d2-ae3a-73341b79563d tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.474942] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.479019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.030s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.481873] env[61545]: INFO nova.compute.claims [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.586409] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 725.586849] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b34dfbb3-7c4b-43ca-a629-3b65ca0d2340 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.597115] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 725.597115] env[61545]: value = "task-4255503" [ 725.597115] env[61545]: _type = "Task" [ 725.597115] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.608245] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255503, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.703234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.703234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquired lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.703234] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.784978] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255502, 'name': Rename_Task, 'duration_secs': 0.142597} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.785522] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 725.785899] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30827fd4-60f4-4c47-80e6-6343dfb3e3fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.796705] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 725.796705] env[61545]: value = "task-4255504" [ 725.796705] env[61545]: _type = "Task" [ 725.796705] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.812718] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.825650] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d8952-6db4-4bef-9edb-0c2f6403a776, 'name': SearchDatastore_Task, 'duration_secs': 0.010095} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.826791] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98197597-f7dd-40b6-abe5-0d134f6ea56e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.836084] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 725.836084] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b8316e-3dc6-9fa7-4c05-bcfb31af2e06" [ 725.836084] env[61545]: _type = "Task" [ 725.836084] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.848820] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b8316e-3dc6-9fa7-4c05-bcfb31af2e06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.109427] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255503, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.226232] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Updated VIF entry in instance network info cache for port 44c7a5f7-291c-4f82-ab36-b2612b504432. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.226232] env[61545]: DEBUG nova.network.neutron [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Updating instance_info_cache with network_info: [{"id": "44c7a5f7-291c-4f82-ab36-b2612b504432", "address": "fa:16:3e:9d:76:43", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44c7a5f7-29", "ovs_interfaceid": "44c7a5f7-291c-4f82-ab36-b2612b504432", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.231786] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Successfully created port: 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.308939] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255504, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.309858] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.350708] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b8316e-3dc6-9fa7-4c05-bcfb31af2e06, 'name': SearchDatastore_Task, 'duration_secs': 0.01366} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.351481] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.351760] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c1b1ac1a-32da-442d-86ef-d754165f5a81/c1b1ac1a-32da-442d-86ef-d754165f5a81.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 726.352064] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a93b4cb7-c9aa-4235-a92c-787391a66306 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.360927] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 726.360927] env[61545]: value = "task-4255505" [ 726.360927] env[61545]: _type = "Task" [ 726.360927] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.371674] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255505, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.495341] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.535226] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.535226] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.535404] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.535818] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.535818] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.535942] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.536305] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.536305] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 726.536468] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.537588] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.537588] env[61545]: DEBUG nova.virt.hardware [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.537795] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a67512-283e-4329-bac6-a1badf6332f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.548731] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c8ca32-497e-46c5-8cfc-2ec5d4f78c82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.615019] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255503, 'name': CreateSnapshot_Task, 'duration_secs': 0.695817} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.615019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 726.615019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6094797-0d32-4a42-8391-1714fcf8f1ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.727180] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4a94ef8-aa7e-4a27-b54b-219ae919c327 req-94876091-04e3-4075-b3b0-6c51e10219ef service nova] Releasing lock "refresh_cache-c1b1ac1a-32da-442d-86ef-d754165f5a81" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.763658] env[61545]: DEBUG nova.compute.manager [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Received event network-vif-plugged-041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 726.763658] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Acquiring lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.763658] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.763658] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.763658] env[61545]: DEBUG nova.compute.manager [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] No waiting events found dispatching network-vif-plugged-041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 726.764225] env[61545]: WARNING nova.compute.manager [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Received unexpected event network-vif-plugged-041534e2-0ec0-43ea-84f9-d0cfdec6808c for instance with vm_state building and task_state spawning. 
[ 726.764225] env[61545]: DEBUG nova.compute.manager [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Received event network-changed-041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 726.764225] env[61545]: DEBUG nova.compute.manager [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Refreshing instance network info cache due to event network-changed-041534e2-0ec0-43ea-84f9-d0cfdec6808c. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 726.764225] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Acquiring lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.815304] env[61545]: DEBUG oslo_vmware.api [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255504, 'name': PowerOnVM_Task, 'duration_secs': 0.747007} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.815757] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 726.815997] env[61545]: INFO nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Took 8.49 seconds to spawn the instance on the hypervisor. [ 726.816196] env[61545]: DEBUG nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 726.817580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bd8244-6fd9-42c8-b197-632edac6b616 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.878807] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255505, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.115067] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0479e95-f695-4dcc-93e3-1757e9c7f218 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.124734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20105656-049a-4a00-94ad-c38f8826a425 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.139941] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 727.140738] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3edb2b44-c3bd-40d5-9542-1c143fa69b00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.178036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c123e39-b005-42f3-a581-2f7b12d41d68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.180825] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 727.180825] env[61545]: value = "task-4255506" [ 727.180825] env[61545]: _type = "Task" [ 727.180825] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.188123] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651cf1cd-d76c-41b3-ab40-4cd48173e8b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.195367] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255506, 'name': CloneVM_Task} progress is 12%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.206083] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.208498] env[61545]: DEBUG nova.network.neutron [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Updating instance_info_cache with network_info: [{"id": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "address": "fa:16:3e:4d:e5:00", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041534e2-0e", "ovs_interfaceid": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.344417] env[61545]: INFO nova.compute.manager [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Took 37.72 seconds to build instance. [ 727.380017] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255505, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747259} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.380017] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c1b1ac1a-32da-442d-86ef-d754165f5a81/c1b1ac1a-32da-442d-86ef-d754165f5a81.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.380255] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.380524] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2daf8633-2ed7-463e-b29b-c21e7d8f418b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.391691] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 727.391691] env[61545]: value = "task-4255507" [ 727.391691] env[61545]: _type = "Task" [ 727.391691] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.403414] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255507, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.693711] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255506, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.716513] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Releasing lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.716513] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Instance network_info: |[{"id": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "address": "fa:16:3e:4d:e5:00", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041534e2-0e", "ovs_interfaceid": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.716989] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Acquired lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.716989] env[61545]: DEBUG nova.network.neutron [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Refreshing network info cache for port 041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.716989] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:e5:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '041534e2-0ec0-43ea-84f9-d0cfdec6808c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.727669] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Creating folder: Project 
(8f91bd66fdcb4aaaa5fe77aa66959491). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.727669] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ede65cfa-46db-476b-b5ea-4686b6c3fb04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.735482] env[61545]: ERROR nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [req-77f5c43b-8210-4975-af12-d20fab1e941b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-77f5c43b-8210-4975-af12-d20fab1e941b"}]} [ 727.742704] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Created folder: Project (8f91bd66fdcb4aaaa5fe77aa66959491) in parent group-v838542. [ 727.742923] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Creating folder: Instances. Parent ref: group-v838604. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.743237] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea693b58-22cc-44dd-8f19-3431412991a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.756037] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Created folder: Instances in parent group-v838604. [ 727.756037] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.756037] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.756407] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12d55017-df21-4684-840c-ccbeec2945c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.775601] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 727.785919] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.785919] env[61545]: value = "task-4255510" [ 727.785919] env[61545]: _type = "Task" [ 727.785919] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.804736] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255510, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.805652] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 727.805917] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.828278] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 727.848014] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c42426e-d2b4-46e0-b2a4-efd2bad360c0 
tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.235s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.851309] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 727.909820] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255507, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082044} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.916157] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.923354] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432289d7-282e-427b-9f86-263f4a0170a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.959537] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] c1b1ac1a-32da-442d-86ef-d754165f5a81/c1b1ac1a-32da-442d-86ef-d754165f5a81.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.964443] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d9f0f36-28f0-4b50-a2d7-c20594c1dd00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.998123] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 727.998123] env[61545]: value = "task-4255511" [ 727.998123] env[61545]: _type = "Task" [ 727.998123] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.009991] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255511, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.196121] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255506, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.298793] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255510, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.356443] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.515806] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255511, 'name': ReconfigVM_Task, 'duration_secs': 0.340006} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.515806] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Reconfigured VM instance instance-00000014 to attach disk [datastore2] c1b1ac1a-32da-442d-86ef-d754165f5a81/c1b1ac1a-32da-442d-86ef-d754165f5a81.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.516371] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9e32f3f-d4d4-4507-b36c-952e99c9216a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.520954] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e7925d-50e6-4d35-b6b6-07212fdc955f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.525874] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 728.525874] env[61545]: value = "task-4255512" [ 728.525874] env[61545]: _type = "Task" [ 728.525874] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.535024] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0c2860-4cde-4702-8c38-a7a98f26f4d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.540073] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255512, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.577788] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e98e978-8e53-4bd0-b955-5ed9ce712faa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.587109] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfcb780-ea45-40a5-8522-f1d1b357bd6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.604062] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 728.697869] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255506, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.799933] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255510, 'name': CreateVM_Task, 'duration_secs': 0.761925} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.800275] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.801039] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.801107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.801446] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 728.801808] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46c5be00-30f0-4145-9f75-71b8c9c55b95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.812022] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 728.812022] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529fd6d8-315d-52c2-06da-cc8aad05ad8f" [ 728.812022] env[61545]: _type = "Task" [ 728.812022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.819540] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529fd6d8-315d-52c2-06da-cc8aad05ad8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.880209] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.036025] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255512, 'name': Rename_Task, 'duration_secs': 0.163322} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.036527] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.036821] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eea27fe7-45ab-4ee1-88ed-de62bdae8a20 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.045590] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 729.045590] env[61545]: value = "task-4255513" [ 729.045590] env[61545]: _type = "Task" [ 729.045590] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.055432] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.145315] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57de7f88-6dd7-4292-9357-1a5389d908be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.150682] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Suspending the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 729.152352] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 729.152888] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 48 to 49 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 729.153462] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 
tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.159860] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-00b00e5a-cf2e-4c9f-9625-b3083004a6e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.168024] env[61545]: DEBUG oslo_vmware.api [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] Waiting for the task: (returnval){ [ 729.168024] env[61545]: value = "task-4255514" [ 729.168024] env[61545]: _type = "Task" [ 729.168024] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.183962] env[61545]: DEBUG oslo_vmware.api [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] Task: {'id': task-4255514, 'name': SuspendVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.196903] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255506, 'name': CloneVM_Task, 'duration_secs': 1.898844} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.197241] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Created linked-clone VM from snapshot [ 729.198211] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae3af48-fb64-423a-8a85-b7d30045bca1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.209016] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Uploading image 68473fee-5958-47fb-a73e-d1afcfee7aae {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 729.243221] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 729.243221] env[61545]: value = "vm-838603" [ 729.243221] env[61545]: _type = "VirtualMachine" [ 729.243221] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 729.243588] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9398bbad-9638-4497-915d-69b67977e2b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.253862] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease: (returnval){ [ 729.253862] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f769e-e943-1ea5-cc76-6f910ea7120f" [ 729.253862] env[61545]: _type = "HttpNfcLease" [ 729.253862] env[61545]: } obtained for exporting VM: (result){ [ 729.253862] env[61545]: value = "vm-838603" [ 729.253862] env[61545]: _type = "VirtualMachine" [ 729.253862] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 729.254284] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the lease: (returnval){ [ 729.254284] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f769e-e943-1ea5-cc76-6f910ea7120f" [ 729.254284] env[61545]: _type = "HttpNfcLease" [ 729.254284] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 729.264436] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 729.264436] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f769e-e943-1ea5-cc76-6f910ea7120f" [ 729.264436] env[61545]: _type = "HttpNfcLease" [ 729.264436] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 729.326190] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529fd6d8-315d-52c2-06da-cc8aad05ad8f, 'name': SearchDatastore_Task, 'duration_secs': 0.052196} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.330028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.330028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.330028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.330028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.330518] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.330518] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c49067d-82eb-41be-8bf9-2fcdcd52d04a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.345119] env[61545]: DEBUG nova.network.neutron [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Updated VIF entry in instance network info cache for port 041534e2-0ec0-43ea-84f9-d0cfdec6808c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 729.345609] env[61545]: DEBUG nova.network.neutron [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Updating instance_info_cache with network_info: [{"id": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "address": "fa:16:3e:4d:e5:00", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041534e2-0e", "ovs_interfaceid": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.350999] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.351407] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.352128] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f52cc20-152d-4de7-a0a8-9922c052051e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.360608] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 729.360608] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14f85-72b5-47eb-3ad6-616fbf095a20" [ 729.360608] env[61545]: _type = "Task" [ 729.360608] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.372661] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14f85-72b5-47eb-3ad6-616fbf095a20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.560171] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255513, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.668018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.183s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.668018] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 729.670105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.736s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.675502] env[61545]: DEBUG nova.objects.instance [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lazy-loading 'resources' on Instance uuid 4f879b20-bae0-4d50-b5e9-378356341962 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.690196] env[61545]: DEBUG oslo_vmware.api [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] Task: {'id': task-4255514, 'name': SuspendVM_Task} progress is 54%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.766520] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 729.766520] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f769e-e943-1ea5-cc76-6f910ea7120f" [ 729.766520] env[61545]: _type = "HttpNfcLease" [ 729.766520] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 729.766885] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 729.766885] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f769e-e943-1ea5-cc76-6f910ea7120f" [ 729.766885] env[61545]: _type = "HttpNfcLease" [ 729.766885] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 729.767982] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4095717-1f71-460b-a109-fd6fcc440904 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.783881] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 729.784328] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 729.887405] env[61545]: DEBUG oslo_concurrency.lockutils [req-a7b8eda3-646e-4b0b-acd0-2592eb80e703 req-4c9fa0cb-3236-4293-bf64-5176962eff7f service nova] Releasing lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.904775] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14f85-72b5-47eb-3ad6-616fbf095a20, 'name': SearchDatastore_Task, 'duration_secs': 0.025567} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.905638] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f8c303-c727-4d55-89b2-bd094cd8728b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.915087] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 729.915087] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5285cfb4-c049-9545-600d-3b557cb4099c" [ 729.915087] env[61545]: _type = "Task" [ 729.915087] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.930035] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5285cfb4-c049-9545-600d-3b557cb4099c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.957606] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9e38316f-336a-4800-bbe9-72a652d577ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.020207] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Successfully updated port: 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.058069] env[61545]: DEBUG oslo_vmware.api [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255513, 'name': PowerOnVM_Task, 'duration_secs': 0.771303} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.058069] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.058069] env[61545]: INFO nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Took 9.05 seconds to spawn the instance on the hypervisor. [ 730.058069] env[61545]: DEBUG nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.058069] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a214adf-2c7b-4236-9362-85ae471af75b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.186994] env[61545]: DEBUG nova.compute.utils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.199123] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 730.199304] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.214631] env[61545]: DEBUG oslo_vmware.api [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] Task: {'id': task-4255514, 'name': SuspendVM_Task} progress is 54%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.402224] env[61545]: DEBUG nova.policy [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4943bc31b1c4f4396688c44c677db0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d2daf4ae4164d9c83882d0e64124316', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.438070] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5285cfb4-c049-9545-600d-3b557cb4099c, 'name': SearchDatastore_Task, 'duration_secs': 0.017534} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.438842] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.440822] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9dbff26a-210c-4e80-812f-c91debe3e9c1/9dbff26a-210c-4e80-812f-c91debe3e9c1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.448099] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5a4ad2b-ea5d-40cd-9062-f09c4346f7a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.461234] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 730.461234] env[61545]: value = "task-4255516" [ 730.461234] env[61545]: _type = "Task" [ 730.461234] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.482613] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255516, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.527858] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.529066] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.529066] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.586487] env[61545]: INFO nova.compute.manager [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Took 38.39 seconds to build instance. [ 730.699377] env[61545]: DEBUG oslo_vmware.api [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] Task: {'id': task-4255514, 'name': SuspendVM_Task, 'duration_secs': 1.107542} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.701147] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Suspended the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 730.701628] env[61545]: DEBUG nova.compute.manager [None req-f8ca1a00-b3de-447e-b82a-c7465d950cb9 tempest-ServersAdminNegativeTestJSON-1111137138 tempest-ServersAdminNegativeTestJSON-1111137138-project-admin] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.706236] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072460c5-bd10-45a1-89b1-67df1bc1aaba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.709860] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 730.975622] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255516, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.000665] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0714e312-d7cf-4ac2-b2de-d9b7f89e0ded {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.010779] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3069a0c-8ce8-40e0-a178-0de1f5bc5495 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.051843] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8926b228-6815-4dbf-ba84-5547b40d493b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.062734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bb33e0-6514-434d-893f-e6f3d4d58cf3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.081379] env[61545]: DEBUG nova.compute.provider_tree [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.092458] env[61545]: DEBUG oslo_concurrency.lockutils [None req-abc39ded-3e5d-4e5d-9aae-534aa17c1993 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.903s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.117023] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.188785] env[61545]: DEBUG nova.compute.manager [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-vif-plugged-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 731.188785] env[61545]: DEBUG oslo_concurrency.lockutils [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] Acquiring lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.192728] env[61545]: DEBUG oslo_concurrency.lockutils [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.192987] env[61545]: DEBUG oslo_concurrency.lockutils [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.193573] env[61545]: DEBUG nova.compute.manager [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] No waiting events found dispatching network-vif-plugged-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 731.194546] env[61545]: WARNING nova.compute.manager [req-7afb73b7-20c8-474c-91fd-e202941b5269 req-025e063a-e0f8-4706-b12a-611aec995e04 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received unexpected event network-vif-plugged-2b9f3635-8a28-4d33-be62-134aabc38027 for instance with vm_state building and task_state spawning. [ 731.370394] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Successfully created port: 0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.482721] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530439} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.484514] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9dbff26a-210c-4e80-812f-c91debe3e9c1/9dbff26a-210c-4e80-812f-c91debe3e9c1.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.484514] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.484514] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e6bc63f-d16a-4dc7-b768-250bb3b36d38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.496239] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 731.496239] env[61545]: value = "task-4255517" [ 731.496239] env[61545]: _type = "Task" [ 731.496239] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.511725] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255517, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.530030] env[61545]: DEBUG nova.network.neutron [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.585321] env[61545]: DEBUG nova.scheduler.client.report [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.596359] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 731.732368] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 731.762375] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 731.762929] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.763026] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 731.763160] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.763327] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 731.763471] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 731.763687] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 731.763840] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 731.764057] env[61545]: DEBUG nova.virt.hardware [None 
req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 731.764248] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 731.764436] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 731.765338] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c5ee29-cee6-4f41-b115-7bd3e7b111d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.775311] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44744d3e-65a9-4af0-a33e-e49e387d13a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.007524] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095062} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.007949] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.008850] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa76425d-9e5d-4662-9dd5-56d8f0d4faf1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.037841] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 9dbff26a-210c-4e80-812f-c91debe3e9c1/9dbff26a-210c-4e80-812f-c91debe3e9c1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.038436] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.038740] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Instance network_info: |[{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 732.038986] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad9e7473-9301-4b82-a3d9-aff257327b9c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.053987] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:e2:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b9f3635-8a28-4d33-be62-134aabc38027', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.061451] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Creating folder: Project (cf5ed987e5404b629b6014e2b74d69d2). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.061768] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ceb1453-5207-47d6-807c-4aade8d0616c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.072147] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 732.072147] env[61545]: value = "task-4255519" [ 732.072147] env[61545]: _type = "Task" [ 732.072147] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.079200] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Created folder: Project (cf5ed987e5404b629b6014e2b74d69d2) in parent group-v838542. [ 732.081649] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Creating folder: Instances. Parent ref: group-v838607. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.084234] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f15a980a-8841-4554-ad9e-468d11bcc66e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.091794] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.421s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.093446] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255519, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.094276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.425s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.095822] env[61545]: INFO nova.compute.claims [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.107103] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Created folder: Instances in parent group-v838607. [ 732.107103] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.109430] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.111024] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f59c729-c8a2-43a2-a75a-3e66c5540fc5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.126692] env[61545]: INFO nova.scheduler.client.report [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Deleted allocations for instance 4f879b20-bae0-4d50-b5e9-378356341962 [ 732.133627] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.133627] env[61545]: value = "task-4255521" [ 732.133627] env[61545]: _type = "Task" [ 732.133627] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.140874] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255521, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.147372] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.590068] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255519, 'name': ReconfigVM_Task, 'duration_secs': 0.508994} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.591912] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 9dbff26a-210c-4e80-812f-c91debe3e9c1/9dbff26a-210c-4e80-812f-c91debe3e9c1.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.596962] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64c8df6e-2fc3-40c2-9fa7-2a05cd60d7a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.614213] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 732.614213] env[61545]: value = "task-4255522" [ 732.614213] env[61545]: _type = "Task" [ 732.614213] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.636217] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255522, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.637542] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3801f7c6-2ba6-4ed5-95f0-0b41eeb7db25 tempest-InstanceActionsTestJSON-1766073935 tempest-InstanceActionsTestJSON-1766073935-project-member] Lock "4f879b20-bae0-4d50-b5e9-378356341962" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.857s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.649475] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255521, 'name': CreateVM_Task, 'duration_secs': 0.445425} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.649825] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.650710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.651084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.651776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.651776] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66c09908-a1b4-4672-b614-95a18d808a4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.661516] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 732.661516] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222d697-538c-09c4-c95e-07906f14c0cb" [ 732.661516] env[61545]: _type = "Task" [ 732.661516] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.677167] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222d697-538c-09c4-c95e-07906f14c0cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.815951] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.817739] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.870828] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "d517f427-8580-481b-b50f-150da6c571b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.871142] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.908656] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "5b2fb040-a964-479f-ae3f-4f428248d64b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.908656] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.134786] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255522, 'name': Rename_Task, 'duration_secs': 0.233334} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.135516] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.135516] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17ba6b71-b208-42b1-8bc2-5fb1803889c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.144516] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 733.144516] env[61545]: value = "task-4255523" [ 733.144516] env[61545]: _type = "Task" [ 733.144516] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.154815] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255523, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.179504] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5222d697-538c-09c4-c95e-07906f14c0cb, 'name': SearchDatastore_Task, 'duration_secs': 0.017151} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.185899] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.186929] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.186929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.186929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.187871] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.187871] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3fb98f6-ac87-456e-b776-5a338ba6294a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.199596] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.199596] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 733.200697] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24df35f2-e7e4-414c-9521-80fdd18805c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.214372] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 733.214372] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c11367-894b-1711-e398-95e5909d461e" [ 733.214372] env[61545]: _type = "Task" [ 733.214372] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.230186] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c11367-894b-1711-e398-95e5909d461e, 'name': SearchDatastore_Task, 'duration_secs': 0.012955} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.237486] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aba4385-0e95-483a-b57b-1258414e24f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.249434] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 733.249434] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289084b-0a93-e9c8-a02d-f2e9e339d48a" [ 733.249434] env[61545]: _type = "Task" [ 733.249434] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.262074] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289084b-0a93-e9c8-a02d-f2e9e339d48a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.660166] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255523, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.678477] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Successfully updated port: 0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.768443] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289084b-0a93-e9c8-a02d-f2e9e339d48a, 'name': SearchDatastore_Task, 'duration_secs': 0.013649} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.768965] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.768965] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f/6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.769854] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd861865-2461-40ff-a251-aa07e33fb728 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.778804] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 733.778804] env[61545]: value = "task-4255524" [ 733.778804] env[61545]: _type = "Task" [ 733.778804] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.795051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "609ba431-b42b-4b0d-9c16-06e19bee114c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.795051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.799371] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.968842] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c8be25-df12-4305-8d29-f68e05496199 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.978474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5d2382-6ee8-4a8f-b5c5-9b6c7afef41d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.015754] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d8255b-4730-4739-93cf-7754473d51fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.024752] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c75818-9c99-400c-9368-5455526bde1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.044142] env[61545]: DEBUG nova.compute.provider_tree [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.157872] env[61545]: DEBUG oslo_vmware.api [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255523, 'name': PowerOnVM_Task, 'duration_secs': 0.600793} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.157872] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.157872] env[61545]: INFO nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Took 10.38 seconds to spawn the instance on the hypervisor. [ 734.157872] env[61545]: DEBUG nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.158587] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ac1089-5988-43fc-9a00-be072273b68d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.185105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.185984] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.185984] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.223885] env[61545]: DEBUG nova.compute.manager [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 734.224753] env[61545]: DEBUG nova.compute.manager [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing instance network info cache due to event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 734.224753] env[61545]: DEBUG oslo_concurrency.lockutils [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.224753] env[61545]: DEBUG oslo_concurrency.lockutils [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.224753] env[61545]: DEBUG nova.network.neutron [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.294410] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255524, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.547024] env[61545]: DEBUG nova.scheduler.client.report [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.686322] env[61545]: INFO nova.compute.manager [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Took 39.41 seconds to build instance. [ 734.794420] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607792} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.795039] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f/6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.795329] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.795743] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4cfd311f-ac52-4fb6-8392-f3c530db807e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.804393] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 734.804393] env[61545]: value = "task-4255525" [ 734.804393] env[61545]: _type = "Task" [ 734.804393] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.816309] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.052872] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.958s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.053404] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.056057] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.901s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.059498] env[61545]: DEBUG nova.objects.instance [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lazy-loading 'resources' on Instance uuid 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.075188] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.192550] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5be6e364-7c4e-4253-bdf4-0a0bd5af65eb tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.925s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.319814] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074586} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.319974] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.323952] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb491ad-55ef-4289-81a0-69a91574ff03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.355255] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f/6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.356134] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-357a8190-f213-4db4-8dff-ef844e902276 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.382598] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 735.382598] env[61545]: value = "task-4255526" [ 735.382598] env[61545]: _type = "Task" [ 735.382598] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.393401] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.560998] env[61545]: DEBUG nova.compute.utils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.571255] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.572783] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.692510] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Updating instance_info_cache with network_info: [{"id": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "address": "fa:16:3e:77:1b:12", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7a22d0-b4", "ovs_interfaceid": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.697735] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 735.776084] env[61545]: DEBUG nova.policy [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4943bc31b1c4f4396688c44c677db0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d2daf4ae4164d9c83882d0e64124316', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.802569] env[61545]: DEBUG nova.network.neutron [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updated VIF entry in instance network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.804058] env[61545]: DEBUG nova.network.neutron [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.895586] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255526, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.068307] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.201877] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.202221] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Instance network_info: |[{"id": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "address": "fa:16:3e:77:1b:12", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7a22d0-b4", "ovs_interfaceid": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 736.205244] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:1b:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c7a22d0-b4ff-436d-bb77-faa9c853dd35', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.216662] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating folder: Project (5d2daf4ae4164d9c83882d0e64124316). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.219656] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12ce6a88-fde0-47be-b49a-cdf9f22eb616 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.234983] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created folder: Project (5d2daf4ae4164d9c83882d0e64124316) in parent group-v838542. [ 736.235202] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating folder: Instances. Parent ref: group-v838610. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.235460] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4127b9aa-6d61-444a-a7c9-516de8b40052 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.241952] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.244482] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f149331c-de75-4b27-b316-9f5524eb9128 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.248980] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created folder: Instances in parent group-v838610. [ 736.249250] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 736.249879] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.250074] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-017d99ed-2f9d-41af-bd9c-234928c660ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.268685] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261d6b59-4875-427e-8f47-8d5034ddbff8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.274011] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.274011] env[61545]: value = "task-4255529" [ 736.274011] env[61545]: _type = "Task" [ 736.274011] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.306939] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d837fba-f6ee-4944-8f87-3e5179eb50b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.313522] env[61545]: DEBUG oslo_concurrency.lockutils [req-4572486f-5a1c-442b-8edd-afb479713337 req-25cb2862-22ea-423a-8a3f-bea333391ca1 service nova] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.314709] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255529, 'name': CreateVM_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.319537] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7577f2cd-2a9b-40d0-aa55-28f348977f1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.339289] env[61545]: DEBUG nova.compute.provider_tree [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.395648] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255526, 'name': ReconfigVM_Task, 'duration_secs': 0.517302} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.395992] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f/6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.396975] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dfce7ab-c168-4d86-b2ee-26e20bb31d32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.405953] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 736.405953] env[61545]: value = "task-4255530" [ 736.405953] env[61545]: _type = "Task" [ 736.405953] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.417102] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255530, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.786690] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255529, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.843354] env[61545]: DEBUG nova.scheduler.client.report [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.899984] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.899984] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.923024] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255530, 'name': Rename_Task, 'duration_secs': 0.295277} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.923024] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 736.923024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2ddc328-9dc7-4149-b1d7-046254f5dca9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.928557] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Successfully created port: 832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.934617] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 736.934617] env[61545]: value = "task-4255531" [ 736.934617] env[61545]: _type = "Task" [ 736.934617] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.941035] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.214716] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.243581] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.243581] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.243581] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.243988] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.243988] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.243988] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.243988] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.243988] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.244190] env[61545]: DEBUG nova.virt.hardware [None 
req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.244963] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.246924] env[61545]: DEBUG nova.virt.hardware [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.246924] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b92d219-aaa9-483e-bee6-a95f93b33b26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.257085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade7b224-e522-4661-ba20-6d8883ecacfd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.289962] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255529, 'name': CreateVM_Task, 'duration_secs': 0.530793} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.289962] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.289962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.289962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.289962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 737.290205] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80a90717-515f-4550-92c5-156cb47094a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.296647] env[61545]: DEBUG oslo_vmware.api [None 
req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 737.296647] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52efadcd-e37b-e633-579d-9d7056efbd1b" [ 737.296647] env[61545]: _type = "Task" [ 737.296647] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.310438] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52efadcd-e37b-e633-579d-9d7056efbd1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.350491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.293s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.353168] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.902s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.356235] env[61545]: INFO nova.compute.claims [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.415090] env[61545]: INFO nova.scheduler.client.report [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Deleted allocations for instance 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3 [ 737.460151] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255531, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.476728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.476998] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.543158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.543261] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.544071] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.544071] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.544071] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.546511] env[61545]: INFO nova.compute.manager [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 
tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Terminating instance [ 737.686109] env[61545]: DEBUG nova.compute.manager [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Received event network-vif-plugged-0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 737.686267] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Acquiring lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.686490] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.686745] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.687112] env[61545]: DEBUG nova.compute.manager [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] No waiting events found dispatching network-vif-plugged-0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 737.687192] env[61545]: WARNING nova.compute.manager [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Received unexpected event network-vif-plugged-0c7a22d0-b4ff-436d-bb77-faa9c853dd35 for instance with vm_state building and task_state spawning. [ 737.687310] env[61545]: DEBUG nova.compute.manager [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Received event network-changed-0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 737.687655] env[61545]: DEBUG nova.compute.manager [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Refreshing instance network info cache due to event network-changed-0c7a22d0-b4ff-436d-bb77-faa9c853dd35. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 737.687753] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Acquiring lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.688060] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Acquired lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.688554] env[61545]: DEBUG nova.network.neutron [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Refreshing network info cache for port 0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.810221] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52efadcd-e37b-e633-579d-9d7056efbd1b, 'name': SearchDatastore_Task, 'duration_secs': 0.016303} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.811170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.811170] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.811170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.811170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.811382] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.811720] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a0eb58d-f5ad-47b0-b5b8-7f86e17d1541 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.822361] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.822647] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.823457] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fee42871-dc2b-404b-b7d3-a3c29f30f821 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.830102] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 737.830102] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52058d23-d311-a5dd-5d5f-9df9502996ee" [ 737.830102] env[61545]: _type = "Task" [ 737.830102] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.840212] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52058d23-d311-a5dd-5d5f-9df9502996ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.931441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d73ac81-f036-48c9-b397-5442cf6e0422 tempest-InstanceActionsV221TestJSON-1438125054 tempest-InstanceActionsV221TestJSON-1438125054-project-member] Lock "1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.955s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.949231] env[61545]: DEBUG oslo_vmware.api [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255531, 'name': PowerOnVM_Task, 'duration_secs': 0.738578} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.949231] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 737.949340] env[61545]: INFO nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Took 11.45 seconds to spawn the instance on the hypervisor. [ 737.949463] env[61545]: DEBUG nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 737.950663] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fdfb1e-b0b1-40ff-b215-f8ca8b6cbe40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.976157] env[61545]: DEBUG nova.compute.manager [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Received event network-changed {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 737.976157] env[61545]: DEBUG nova.compute.manager [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Refreshing instance network info cache due to event network-changed. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 737.976157] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] Acquiring lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.976157] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] Acquired lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.976157] env[61545]: DEBUG nova.network.neutron [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.054035] env[61545]: DEBUG nova.compute.manager [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.054035] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.054035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102bfefd-b56a-4ab7-86ce-3325125e5f2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.062023] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.062023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f64979f-5994-48b1-b6b3-58d4ca2a0296 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.071217] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 738.071217] env[61545]: value = "task-4255532" [ 738.071217] env[61545]: _type = "Task" [ 738.071217] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.079665] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255532, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.332110] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "6f2a4514-4de9-427d-91be-f445235696bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.332358] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.346927] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52058d23-d311-a5dd-5d5f-9df9502996ee, 'name': SearchDatastore_Task, 'duration_secs': 0.012703} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.346927] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e6d937f-2c7c-4dbd-b154-cc341674e8c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.353629] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 738.353629] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529484f8-e4d0-c105-e872-06157432fd5d" [ 738.353629] env[61545]: _type = "Task" [ 738.353629] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.364779] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529484f8-e4d0-c105-e872-06157432fd5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.483329] env[61545]: INFO nova.compute.manager [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Took 41.06 seconds to build instance. 
[ 738.582337] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255532, 'name': PowerOffVM_Task, 'duration_secs': 0.237411} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.587024] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.587024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.587024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf0d2942-b0ec-404d-979c-e9013fc5a33c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.656994] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.657229] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.658708] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleting the datastore file [datastore2] c1b1ac1a-32da-442d-86ef-d754165f5a81 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.658708] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31b59bdd-6dbb-4175-a2f6-37644918d574 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.670380] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for the task: (returnval){ [ 738.670380] env[61545]: value = "task-4255534" [ 738.670380] env[61545]: _type = "Task" [ 738.670380] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.684523] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.835525] env[61545]: DEBUG nova.network.neutron [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Updated VIF entry in instance network info cache for port 0c7a22d0-b4ff-436d-bb77-faa9c853dd35. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 738.835921] env[61545]: DEBUG nova.network.neutron [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Updating instance_info_cache with network_info: [{"id": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "address": "fa:16:3e:77:1b:12", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7a22d0-b4", "ovs_interfaceid": "0c7a22d0-b4ff-436d-bb77-faa9c853dd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.872343] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529484f8-e4d0-c105-e872-06157432fd5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011423} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.872547] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.872816] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 72656070-cfd0-4104-a9c7-ec20c5a6238a/72656070-cfd0-4104-a9c7-ec20c5a6238a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.873107] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6435950-14db-43e1-bc6b-587375bd78e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.882674] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 738.882674] env[61545]: value = "task-4255535" [ 738.882674] env[61545]: _type = "Task" [ 738.882674] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.897492] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255535, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.936540] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.936914] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.937184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.937386] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.937577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.944074] env[61545]: INFO nova.compute.manager [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Terminating instance [ 738.985392] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef5fcf5a-fc4d-4f25-b398-085d860beda8 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.573s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.055402] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd51fbfe-ac35-4c53-b829-ffb520208f28 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.064953] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-165d3a16-d767-46be-97b1-fcda12213537 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.113098] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91f9cb9-16fb-4fe0-bbfc-94509dedd4d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.123256] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748e3290-b9e1-4dcb-b94c-bcd6e23635fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.141512] env[61545]: DEBUG nova.compute.provider_tree [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.185778] env[61545]: DEBUG oslo_vmware.api [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Task: {'id': task-4255534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189544} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.186266] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.186600] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.187079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.188921] env[61545]: INFO nova.compute.manager [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Took 1.14 seconds to destroy the instance on the hypervisor. [ 739.188921] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.191460] env[61545]: DEBUG nova.compute.manager [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.191617] env[61545]: DEBUG nova.network.neutron [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.346089] env[61545]: DEBUG oslo_concurrency.lockutils [req-f1357d81-b480-4733-a01e-d0563d836681 req-13af4fbb-a932-4a17-a6de-0b82cec551cb service nova] Releasing lock "refresh_cache-72656070-cfd0-4104-a9c7-ec20c5a6238a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.396656] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255535, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.451515] env[61545]: DEBUG nova.compute.manager [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.452508] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.453869] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d6ec76-d755-4c78-8078-4a2e0ae07baa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.464096] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.464702] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba298ac4-13eb-478e-af20-db685f99b0a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.472903] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 739.472903] env[61545]: value = "task-4255536" [ 739.472903] env[61545]: _type = "Task" [ 739.472903] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.485621] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.488000] env[61545]: DEBUG nova.network.neutron [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Updating instance_info_cache with network_info: [{"id": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "address": "fa:16:3e:4d:e5:00", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041534e2-0e", "ovs_interfaceid": "041534e2-0ec0-43ea-84f9-d0cfdec6808c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.489871] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.648023] env[61545]: DEBUG nova.scheduler.client.report [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.705362] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Successfully updated port: 832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.894142] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598359} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.894486] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 72656070-cfd0-4104-a9c7-ec20c5a6238a/72656070-cfd0-4104-a9c7-ec20c5a6238a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.894720] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.895024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f54360c-3512-4f62-a72e-4b4fefb72e0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.906326] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 739.906326] env[61545]: value = "task-4255537" [ 739.906326] env[61545]: _type = "Task" [ 739.906326] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.915228] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255537, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.985172] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255536, 'name': PowerOffVM_Task, 'duration_secs': 0.377525} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.985172] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.985323] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 739.985670] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02bd9279-995c-45f1-bd3c-32a80adad27a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.993647] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a3782ced-d511-43f3-8bb8-50a5b05c64dd tempest-ServerExternalEventsTest-1016855713 tempest-ServerExternalEventsTest-1016855713-project] Releasing lock "refresh_cache-9dbff26a-210c-4e80-812f-c91debe3e9c1" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.021079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.064863] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.065125] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.065399] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Deleting the datastore file [datastore2] 9dbff26a-210c-4e80-812f-c91debe3e9c1 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.065763] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-153cb3da-0d9a-45c6-ada9-9f86cad70ee1 {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.074963] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for the task: (returnval){ [ 740.074963] env[61545]: value = "task-4255539" [ 740.074963] env[61545]: _type = "Task" [ 740.074963] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.084787] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255539, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.153075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.800s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.153783] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.159377] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.303s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.161012] env[61545]: INFO nova.compute.claims [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.189874] env[61545]: DEBUG nova.compute.manager [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Received event network-vif-plugged-832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 740.190409] env[61545]: DEBUG oslo_concurrency.lockutils [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] Acquiring lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.190567] env[61545]: DEBUG oslo_concurrency.lockutils [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.190701] env[61545]: DEBUG oslo_concurrency.lockutils [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.191223] env[61545]: DEBUG nova.compute.manager [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] No waiting events found dispatching network-vif-plugged-832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 740.191512] env[61545]: WARNING nova.compute.manager [req-9c12fa47-e03d-4eba-9eb3-4fd1ded723c4 req-78faaf6e-ee48-496f-8d92-c40d6dd46987 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Received unexpected event network-vif-plugged-832392a5-d47c-47ed-9aaa-c361e28e2418 for instance with vm_state building and task_state spawning. [ 740.211979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.212203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.212405] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.288468] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 740.289515] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96e6800-bf64-40dc-911a-2d73827f43e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.297315] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk is in state: ready. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 740.297541] env[61545]: ERROR oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk due to incomplete transfer. [ 740.297824] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6d5a7551-61fe-458e-859c-24384e7f969d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.313816] env[61545]: DEBUG oslo_vmware.rw_handles [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b6e2b2-a1cd-dc62-24f7-292b41d99be9/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 740.314115] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Uploaded image 68473fee-5958-47fb-a73e-d1afcfee7aae to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 740.316537] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 740.316827] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dfc0d430-715a-4335-8122-bf2c387b3b5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.324475] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 740.324475] env[61545]: value = "task-4255540" [ 740.324475] env[61545]: _type = "Task" [ 740.324475] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.337553] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255540, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.341034] env[61545]: DEBUG nova.network.neutron [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.418671] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075674} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.420177] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.421106] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059786f8-3631-484a-a332-9189d08ccdfb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.449519] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 72656070-cfd0-4104-a9c7-ec20c5a6238a/72656070-cfd0-4104-a9c7-ec20c5a6238a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.449883] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3bb14aa-7d2b-4e19-9ba2-ff208befc584 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.470187] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 740.470187] env[61545]: value = "task-4255541" [ 740.470187] env[61545]: _type = "Task" [ 740.470187] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.483056] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.354845] env[61545]: DEBUG nova.compute.utils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.357065] env[61545]: INFO nova.compute.manager [-] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Took 2.17 seconds to deallocate network for instance. [ 741.357428] env[61545]: DEBUG oslo_vmware.api [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Task: {'id': task-4255539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157811} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.358058] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.358249] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.367570] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.367830] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.368057] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.368249] env[61545]: INFO nova.compute.manager [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Took 1.92 seconds to destroy the instance on the hypervisor. [ 741.368873] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.372598] env[61545]: DEBUG nova.compute.manager [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.372690] env[61545]: DEBUG nova.network.neutron [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.382339] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255540, 'name': Destroy_Task, 'duration_secs': 0.600791} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.385090] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Destroyed the VM [ 741.385415] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 741.385753] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255541, 'name': ReconfigVM_Task, 'duration_secs': 0.868917} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.386279] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bbd66daa-b665-4816-9155-9229538735c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.388350] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 72656070-cfd0-4104-a9c7-ec20c5a6238a/72656070-cfd0-4104-a9c7-ec20c5a6238a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.389192] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19895ce5-5734-4614-9632-76bb70196abc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.397882] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 741.397882] env[61545]: value = "task-4255543" [ 741.397882] env[61545]: _type = "Task" [ 741.397882] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.398518] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 741.398518] env[61545]: value = "task-4255542" [ 741.398518] env[61545]: _type = "Task" [ 741.398518] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.421399] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255543, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.421674] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255542, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.469593] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.601731] env[61545]: DEBUG nova.policy [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d5677e0c7a74f8abe4dd364c619b47f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a1542baa59a4be387f3fe1526116d37', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 741.651893] env[61545]: DEBUG nova.compute.manager [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 741.652106] env[61545]: DEBUG nova.compute.manager [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing instance network info cache due to event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 741.652426] env[61545]: DEBUG oslo_concurrency.lockutils [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.652511] env[61545]: DEBUG oslo_concurrency.lockutils [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.652898] env[61545]: DEBUG nova.network.neutron [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 741.858748] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.876401] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.920640] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255543, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.926064] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255542, 'name': Rename_Task, 'duration_secs': 0.192703} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.934777] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.935297] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c40fed51-1238-4d00-a96a-828fe0dd72e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.944350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.944502] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.944825] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.945126] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.945950] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.948287] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 741.948287] env[61545]: value = "task-4255544" [ 741.948287] env[61545]: _type = "Task" [ 741.948287] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.948287] env[61545]: INFO nova.compute.manager [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Terminating instance [ 741.971080] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255544, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.973424] env[61545]: DEBUG nova.network.neutron [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Updating instance_info_cache with network_info: [{"id": "832392a5-d47c-47ed-9aaa-c361e28e2418", "address": "fa:16:3e:96:02:0e", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap832392a5-d4", "ovs_interfaceid": "832392a5-d47c-47ed-9aaa-c361e28e2418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.416308] env[61545]: DEBUG oslo_vmware.api [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255543, 'name': RemoveSnapshot_Task, 'duration_secs': 0.818657} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.417909] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 742.417909] env[61545]: INFO nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Took 17.37 seconds to snapshot the instance on the hypervisor. 
[ 742.460293] env[61545]: DEBUG nova.compute.manager [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 742.460604] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.469650] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c629815d-d2ad-4bc2-b9dc-3e5ab5cb6548 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.472587] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255544, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.477795] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.478551] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Instance network_info: |[{"id": "832392a5-d47c-47ed-9aaa-c361e28e2418", "address": "fa:16:3e:96:02:0e", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap832392a5-d4", "ovs_interfaceid": "832392a5-d47c-47ed-9aaa-c361e28e2418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 742.488161] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:96:02:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '832392a5-d47c-47ed-9aaa-c361e28e2418', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.494952] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.500231] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.500609] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.500873] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.501095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.502130] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.503057] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.503419] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] 
[instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.503916] env[61545]: INFO nova.compute.manager [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Terminating instance [ 742.509343] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-810b2256-2e9e-42d3-a798-07a5fb927a65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.530417] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3868363f-1d79-4e58-9584-17b0b8531673 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.543015] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.543015] env[61545]: value = "task-4255546" [ 742.543015] env[61545]: _type = "Task" [ 742.543015] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.555493] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255546, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.625086] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.625222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.625421] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] 26a6b40e-f8a4-4cc6-bdbb-586ca592901c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.625747] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c97b0ea4-e10b-4173-b588-d5722ada64de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.643340] env[61545]: DEBUG oslo_vmware.api [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 742.643340] env[61545]: value = "task-4255547" [ 742.643340] env[61545]: _type = "Task" [ 742.643340] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.655998] env[61545]: DEBUG oslo_vmware.api [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255547, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.657890] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efcc127-871b-4542-a789-92ce59ae1ca2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.666346] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a8190e-1b9a-4d3e-94bf-7a7aa86661a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.707097] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Successfully created port: af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.710037] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bd08b3-ab95-454d-b34d-919cb520d8be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.721367] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd27a43-c639-477e-9ce3-7cb020124dc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.741502] env[61545]: DEBUG nova.compute.provider_tree [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.749990] env[61545]: DEBUG nova.network.neutron [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.864741] env[61545]: DEBUG nova.network.neutron [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updated VIF entry in instance network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.865113] env[61545]: DEBUG nova.network.neutron [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.873306] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.922078] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.922078] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.922078] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.922078] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.922348] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.922348] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.922348] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.922348] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.922348] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f 
tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.922601] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.922601] env[61545]: DEBUG nova.virt.hardware [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.923216] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4564cd9-ae80-42d3-a77e-7c6c66d2fdde {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.930046] env[61545]: DEBUG nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance disappeared during snapshot {{(pid=61545) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 742.938658] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6c9da0-bcba-41f2-adad-6b344f738c9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.955377] env[61545]: DEBUG nova.compute.manager [None req-34e2eaf2-79f3-46f2-8020-619162283321 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image not found during clean up 68473fee-5958-47fb-a73e-d1afcfee7aae {{(pid=61545) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 742.964995] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255544, 'name': PowerOnVM_Task, 'duration_secs': 0.671311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.966025] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.966025] env[61545]: INFO nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Took 11.23 seconds to spawn the instance on the hypervisor. 
[ 742.966602] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.967070] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16bf719-1965-4702-b8c2-09e80d453134 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.037554] env[61545]: DEBUG nova.compute.manager [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 743.037811] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.039314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d188b60-c15f-44fb-938a-e35b14a526a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.051154] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 743.051878] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-927c2580-eec7-4816-8387-11c033e4167c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.058689] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255546, 'name': CreateVM_Task, 'duration_secs': 0.411434} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.058689] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 743.059291] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.059477] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.060133] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 743.060133] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-385bdaae-fc99-41ee-82ef-d727f743b4a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.065291] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 743.065291] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52134e35-363b-578b-d178-d078a01b3b3f" [ 743.065291] env[61545]: _type = "Task" [ 743.065291] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.078613] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52134e35-363b-578b-d178-d078a01b3b3f, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.079041] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.079302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.079616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.079773] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.079949] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.080230] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84873043-059b-4c4e-88f5-67c6b77fcafb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.093350] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.093350] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 743.093620] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c0e8e0-03bc-42bc-a245-42dbb41e6ff0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.100479] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 743.100479] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c23038-0649-e9a9-c906-4403864d8db0" [ 743.100479] env[61545]: _type = "Task" [ 743.100479] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.109429] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c23038-0649-e9a9-c906-4403864d8db0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.125974] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.126227] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.126437] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleting the datastore file [datastore2] 43cf4c96-2c8b-4520-8926-c1be5a87734e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.127030] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-521c42f8-4e76-4aaa-b13f-caec4967ae8f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.135182] env[61545]: DEBUG oslo_vmware.api [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 743.135182] env[61545]: value = "task-4255549" [ 743.135182] env[61545]: _type = "Task" [ 743.135182] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.144815] env[61545]: DEBUG oslo_vmware.api [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255549, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.154443] env[61545]: DEBUG oslo_vmware.api [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177847} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.154919] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.154987] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.155505] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.155505] env[61545]: INFO nova.compute.manager [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Took 0.69 seconds to destroy the instance on the hypervisor. [ 743.155636] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.156231] env[61545]: DEBUG nova.compute.manager [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 743.156362] env[61545]: DEBUG nova.network.neutron [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.242951] env[61545]: DEBUG nova.scheduler.client.report [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.254317] env[61545]: INFO nova.compute.manager [-] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Took 1.88 seconds to deallocate network for instance. [ 743.258020] env[61545]: DEBUG nova.compute.manager [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Received event network-changed-832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 743.258020] env[61545]: DEBUG nova.compute.manager [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Refreshing instance network info cache due to event network-changed-832392a5-d47c-47ed-9aaa-c361e28e2418. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 743.259581] env[61545]: DEBUG oslo_concurrency.lockutils [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] Acquiring lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.259581] env[61545]: DEBUG oslo_concurrency.lockutils [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] Acquired lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.259581] env[61545]: DEBUG nova.network.neutron [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Refreshing network info cache for port 832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.371525] env[61545]: DEBUG oslo_concurrency.lockutils [req-b4ea9945-8379-4986-9c40-7a5b396b7e0c req-be85cacf-5abd-45e8-a464-3df90b42e6ac service nova] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.487944] env[61545]: INFO nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Took 45.08 seconds to build instance. [ 743.538022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "13db992b-db13-451f-a853-9b7de28b9184" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.538022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.614363] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c23038-0649-e9a9-c906-4403864d8db0, 'name': SearchDatastore_Task, 'duration_secs': 0.015284} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.615196] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f374555d-d76e-4aa0-9002-1c69460e24d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.621519] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 743.621519] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc3fb6-24d9-87d3-14ec-9a7b4859258b" [ 743.621519] env[61545]: _type = "Task" [ 743.621519] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.630366] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc3fb6-24d9-87d3-14ec-9a7b4859258b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.646478] env[61545]: DEBUG oslo_vmware.api [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171172} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.646478] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.646478] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.646478] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.646478] env[61545]: INFO nova.compute.manager [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Took 0.61 seconds to destroy the instance on the hypervisor. [ 743.646743] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.646828] env[61545]: DEBUG nova.compute.manager [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 743.646922] env[61545]: DEBUG nova.network.neutron [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.748665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.592s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.749234] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.756020] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.262s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.756020] env[61545]: DEBUG nova.objects.instance [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lazy-loading 'resources' on Instance uuid 8214216a-0256-467e-ac4c-1d14b0f73b77 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 743.767474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.990730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.591s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.012101] env[61545]: DEBUG nova.network.neutron [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Updated VIF entry in instance network info cache for port 832392a5-d47c-47ed-9aaa-c361e28e2418. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 744.012747] env[61545]: DEBUG nova.network.neutron [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Updating instance_info_cache with network_info: [{"id": "832392a5-d47c-47ed-9aaa-c361e28e2418", "address": "fa:16:3e:96:02:0e", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap832392a5-d4", "ovs_interfaceid": "832392a5-d47c-47ed-9aaa-c361e28e2418", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.144330] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc3fb6-24d9-87d3-14ec-9a7b4859258b, 'name': SearchDatastore_Task, 'duration_secs': 0.023666} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.144816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.145231] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 79ba6f70-c967-4abf-a2a7-c70046a2602d/79ba6f70-c967-4abf-a2a7-c70046a2602d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.145670] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-288f493c-24fa-467c-b905-654e8c84ed26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.156767] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 744.156767] env[61545]: value = "task-4255550" [ 744.156767] env[61545]: _type = "Task" [ 744.156767] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.172200] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.259365] env[61545]: DEBUG nova.compute.utils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.264215] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.264215] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.352659] env[61545]: DEBUG nova.policy [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb62f61940d74d3e9db31ff7f3e2456b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39f1a55fa71c4ec28278ebd71a4bf4d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.474052] env[61545]: DEBUG nova.network.neutron [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.496118] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.517021] env[61545]: DEBUG oslo_concurrency.lockutils [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] Releasing lock "refresh_cache-79ba6f70-c967-4abf-a2a7-c70046a2602d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.517021] env[61545]: DEBUG nova.compute.manager [req-4c30f03e-e086-4ea4-818f-39f6dc7c3756 req-ebbf5e75-4604-46e7-a789-112030e65e25 service nova] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Received event network-vif-deleted-44c7a5f7-291c-4f82-ab36-b2612b504432 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 744.672174] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255550, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.683919] env[61545]: DEBUG nova.network.neutron [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.769200] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.978846] env[61545]: INFO nova.compute.manager [-] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Took 1.82 seconds to deallocate network for instance. [ 745.037329] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.057016] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ec83df-a231-4254-8c7c-22484fcffa47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.066268] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14669e43-a5ae-4ed6-a0f3-c5144fe97e7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.108706] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360763f9-dc86-4205-85db-7aeb5a98c5a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.116828] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8108089c-7ce0-41ed-86df-dc416a8a0b3f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.132747] env[61545]: DEBUG nova.compute.provider_tree [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.171687] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255550, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.191620] env[61545]: INFO nova.compute.manager [-] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Took 1.54 seconds to deallocate network for instance. 
[ 745.202243] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Successfully created port: 7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.293382] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.293933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.329035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.329035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.337218] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Successfully updated port: af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.487521] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.639690] env[61545]: DEBUG nova.scheduler.client.report [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.672787] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255550, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.701987] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.785488] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.821262] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.821588] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.821733] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.821926] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.822476] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db 
tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.822716] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.822958] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.823153] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.823380] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.823534] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.823763] env[61545]: DEBUG nova.virt.hardware [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.824744] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade69e43-a340-405a-b8f8-4a26ee4e8be6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.836573] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f035de-6e7a-4fc5-96a4-2939ae25fa56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.841206] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.841352] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" 
{{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.841512] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.921978] env[61545]: DEBUG nova.compute.manager [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Received event network-vif-deleted-041534e2-0ec0-43ea-84f9-d0cfdec6808c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 745.922394] env[61545]: DEBUG nova.compute.manager [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Received event network-vif-deleted-f8265baf-2284-40a0-b20a-88199fb2bbda {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 745.922481] env[61545]: DEBUG nova.compute.manager [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Received event network-vif-deleted-a9fecc2f-82bc-401e-b2ac-6d840b6e25be {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 745.922776] env[61545]: DEBUG nova.compute.manager [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Received event network-vif-plugged-af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 745.922882] env[61545]: DEBUG oslo_concurrency.lockutils [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] Acquiring lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.923633] env[61545]: DEBUG oslo_concurrency.lockutils [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.923633] env[61545]: DEBUG oslo_concurrency.lockutils [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.923633] env[61545]: DEBUG nova.compute.manager [req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] No waiting events found dispatching network-vif-plugged-af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.923633] env[61545]: WARNING nova.compute.manager 
[req-598f639e-ce0e-43db-be5f-4df7d8e156ea req-83a1405d-5d98-402e-8006-97c903d956a6 service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Received unexpected event network-vif-plugged-af3b8dc6-019b-4076-b26a-f6eaaa30a979 for instance with vm_state building and task_state spawning. [ 746.145208] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.392s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.147942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.859s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.148213] env[61545]: DEBUG nova.objects.instance [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 746.171770] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255550, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.812027} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.172228] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 79ba6f70-c967-4abf-a2a7-c70046a2602d/79ba6f70-c967-4abf-a2a7-c70046a2602d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 746.174461] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 746.174461] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60c73c21-5cb0-4a30-b3db-bc54302b7951 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.181022] env[61545]: INFO nova.scheduler.client.report [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Deleted allocations for instance 8214216a-0256-467e-ac4c-1d14b0f73b77 [ 746.188932] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 746.188932] env[61545]: value = "task-4255551" [ 746.188932] env[61545]: _type = "Task" [ 746.188932] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.201260] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255551, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.397978] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.637517] env[61545]: DEBUG nova.network.neutron [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Updating instance_info_cache with network_info: [{"id": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "address": "fa:16:3e:0f:50:62", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3b8dc6-01", "ovs_interfaceid": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.696509] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6912a313-19f8-45fc-acf2-5449a4fc7c70 tempest-ImagesNegativeTestJSON-1172652629 tempest-ImagesNegativeTestJSON-1172652629-project-member] Lock "8214216a-0256-467e-ac4c-1d14b0f73b77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.844s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.705368] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086937} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.705743] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 746.707619] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418ca3f4-15fb-40c0-9161-de16b8a4aa52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.730111] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 79ba6f70-c967-4abf-a2a7-c70046a2602d/79ba6f70-c967-4abf-a2a7-c70046a2602d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.731110] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebd3b9e9-6173-4a1b-bb5c-5584922c31f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.753664] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 746.753664] env[61545]: value = "task-4255552" [ 746.753664] env[61545]: _type = "Task" [ 746.753664] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.764164] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255552, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.802782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "602bd42d-6afa-4419-8352-73a9daab2fe0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.803089] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.143505] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.143881] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Instance network_info: |[{"id": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "address": "fa:16:3e:0f:50:62", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3b8dc6-01", "ovs_interfaceid": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.144382] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:50:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af3b8dc6-019b-4076-b26a-f6eaaa30a979', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.152086] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating folder: Project (3a1542baa59a4be387f3fe1526116d37). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.152386] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52c5e3e9-4857-4f67-98b2-f74169918500 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.158668] env[61545]: DEBUG oslo_concurrency.lockutils [None req-662c01f8-329c-4ae8-b9b1-b39c9d5d345e tempest-ServersAdmin275Test-1719673761 tempest-ServersAdmin275Test-1719673761-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.160323] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.109s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.162280] env[61545]: INFO nova.compute.claims [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.167149] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created folder: Project (3a1542baa59a4be387f3fe1526116d37) in parent group-v838542. [ 747.167402] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating folder: Instances. Parent ref: group-v838614. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.167737] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9634b82-e3b1-48af-a762-0cd110c24408 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.178634] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created folder: Instances in parent group-v838614. [ 747.178909] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.179122] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.179338] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59eb11eb-8c05-429d-ab29-3a58532cc5f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.201238] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.201238] env[61545]: value = "task-4255555" [ 747.201238] env[61545]: _type = "Task" [ 747.201238] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.211121] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255555, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.264245] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255552, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.399112] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Successfully updated port: 7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.715059] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255555, 'name': CreateVM_Task, 'duration_secs': 0.431436} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.715059] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.715059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.715059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.715059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.715059] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ce5e636-b5d5-4c9c-bd50-49505af293dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.722012] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 747.722012] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52248ce5-cdbe-95d6-f216-3493d5e84d97" [ 747.722012] env[61545]: _type = "Task" [ 747.722012] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.731954] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52248ce5-cdbe-95d6-f216-3493d5e84d97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.766404] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255552, 'name': ReconfigVM_Task, 'duration_secs': 0.520032} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.766699] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 79ba6f70-c967-4abf-a2a7-c70046a2602d/79ba6f70-c967-4abf-a2a7-c70046a2602d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.767453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e540a96-7b7f-4d07-aff7-681d04535e28 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.774956] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 747.774956] env[61545]: value = "task-4255556" [ 747.774956] env[61545]: _type = "Task" [ 747.774956] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.786806] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255556, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.902887] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.903084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.903381] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.971875] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Received event network-changed-af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 747.972343] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Refreshing instance network info cache due to event network-changed-af3b8dc6-019b-4076-b26a-f6eaaa30a979. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 747.972602] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Acquiring lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.972768] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Acquired lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.973046] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Refreshing network info cache for port af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.235490] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52248ce5-cdbe-95d6-f216-3493d5e84d97, 'name': SearchDatastore_Task, 'duration_secs': 0.012994} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.235819] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.236068] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.237230] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.237230] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.237230] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.238591] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adc343ae-2e89-4a3b-8d9c-e9a5472ad9ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.249793] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.250022] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.250761] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f578d7b1-9875-4d31-b2d6-3968222d82ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.256468] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 748.256468] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e89e93-d7c6-6667-c2f0-493e63d07f83" [ 748.256468] env[61545]: _type = "Task" [ 748.256468] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.265282] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e89e93-d7c6-6667-c2f0-493e63d07f83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.292244] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255556, 'name': Rename_Task, 'duration_secs': 0.159532} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.298178] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.299070] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-516352e0-09f0-4470-974c-6a71eef2946f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.308676] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 748.308676] env[61545]: value = "task-4255557" [ 748.308676] env[61545]: _type = "Task" [ 748.308676] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.325401] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.478474] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.770142] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e89e93-d7c6-6667-c2f0-493e63d07f83, 'name': SearchDatastore_Task, 'duration_secs': 0.010006} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.771056] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7300e85-a280-4f17-a5c2-ad766b913df1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.781438] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 748.781438] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2fc3f-bba0-a06e-46ef-dc809fb2e628" [ 748.781438] env[61545]: _type = "Task" [ 748.781438] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.797921] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2fc3f-bba0-a06e-46ef-dc809fb2e628, 'name': SearchDatastore_Task, 'duration_secs': 0.011964} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.797921] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.797921] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/4b29ebc4-d913-447c-bc57-890953cf8d49.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.797921] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da41d81f-463b-4fff-a496-dc4a63bc1f29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.809034] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 748.809034] env[61545]: value = "task-4255558" [ 748.809034] env[61545]: _type = "Task" [ 748.809034] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.823167] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.827303] env[61545]: DEBUG oslo_vmware.api [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255557, 'name': PowerOnVM_Task, 'duration_secs': 0.479673} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.827456] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 748.827676] env[61545]: INFO nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Took 11.62 seconds to spawn the instance on the hypervisor. [ 748.827909] env[61545]: DEBUG nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 748.828908] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4203c889-3b96-42a4-9f62-049229cf3bef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.846626] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d33ecc-fa7b-42f0-8ccb-08c58404d7d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.856910] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b253ebd-bd02-4fdd-8eb3-ab19519648a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.896314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3609cfe-3006-4d9c-b84c-5a694f4388c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.905671] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba4948c-fba2-4968-8512-83f945386a52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.921051] env[61545]: DEBUG nova.compute.provider_tree [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.153649] env[61545]: DEBUG nova.network.neutron [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating instance_info_cache with network_info: [{"id": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "address": "fa:16:3e:37:c4:ae", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e56c9b7-f0", "ovs_interfaceid": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.234380] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Updated VIF entry in instance network info cache for port af3b8dc6-019b-4076-b26a-f6eaaa30a979. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.234867] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Updating instance_info_cache with network_info: [{"id": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "address": "fa:16:3e:0f:50:62", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3b8dc6-01", "ovs_interfaceid": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.317759] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255558, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.360685] env[61545]: INFO nova.compute.manager [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Took 48.72 seconds to build instance. 
[ 749.424398] env[61545]: DEBUG nova.scheduler.client.report [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.659116] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.659116] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Instance network_info: |[{"id": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "address": "fa:16:3e:37:c4:ae", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e56c9b7-f0", "ovs_interfaceid": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.659690] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:c4:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e56c9b7-f0cb-41e5-b513-077c74cba86c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.668973] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 
tempest-VolumesAdminNegativeTest-2067435148-project-member] Creating folder: Project (39f1a55fa71c4ec28278ebd71a4bf4d3). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.668973] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e6b8d6a-3029-47ac-8d06-22e5be3995fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.681716] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Created folder: Project (39f1a55fa71c4ec28278ebd71a4bf4d3) in parent group-v838542. [ 749.681911] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Creating folder: Instances. Parent ref: group-v838617. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.682197] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecf37db7-2099-42c8-a2ef-f27832c4bcce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.694241] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Created folder: Instances in parent group-v838617. [ 749.695187] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.695187] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.695187] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e874240-d524-4915-aa29-ba483124af54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.719084] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.719084] env[61545]: value = "task-4255561" [ 749.719084] env[61545]: _type = "Task" [ 749.719084] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.728872] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255561, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.738291] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Releasing lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.738563] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Received event network-vif-plugged-7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 749.738779] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.739038] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.739212] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.740033] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] No waiting events found dispatching network-vif-plugged-7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.740033] env[61545]: WARNING nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Received unexpected event network-vif-plugged-7e56c9b7-f0cb-41e5-b513-077c74cba86c for instance with vm_state building and task_state spawning. [ 749.740033] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Received event network-changed-7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 749.740033] env[61545]: DEBUG nova.compute.manager [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Refreshing instance network info cache due to event network-changed-7e56c9b7-f0cb-41e5-b513-077c74cba86c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 749.740215] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Acquiring lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.740245] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Acquired lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.740675] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Refreshing network info cache for port 7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.820420] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512687} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.820766] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/4b29ebc4-d913-447c-bc57-890953cf8d49.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.820983] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.821351] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75d641b0-bab2-4303-bd75-55bbded8b748 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.830431] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 749.830431] env[61545]: value = "task-4255562" [ 749.830431] env[61545]: _type = "Task" [ 749.830431] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.841655] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255562, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.863402] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b0fe4336-85e3-4877-853b-0879f2c52963 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.426s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.929261] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.769s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.929810] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.932757] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.741s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.933033] env[61545]: DEBUG nova.objects.instance [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lazy-loading 'resources' on Instance uuid 5719daa8-a5bc-4604-b465-a57097695c6d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.237171] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255561, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.259105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.259427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.259768] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.260083] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.260362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.265021] env[61545]: INFO nova.compute.manager [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Terminating instance [ 750.347911] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.208341} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.348536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.349397] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171a72c1-a563-48af-84a5-7c0903e60057 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.366075] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.381119] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/4b29ebc4-d913-447c-bc57-890953cf8d49.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.381898] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-335558b8-2697-4e7a-96ef-73d4c006c4c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.399563] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.399876] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.400151] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.400388] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.400598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.403796] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 750.403796] env[61545]: value = "task-4255563" [ 750.403796] env[61545]: _type = "Task" [ 750.403796] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.404901] env[61545]: INFO nova.compute.manager [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Terminating instance [ 750.417256] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255563, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.437128] env[61545]: DEBUG nova.compute.utils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.441901] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.442629] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.559485] env[61545]: DEBUG nova.policy [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '820afec002514775bd94a71b4d4547a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e060fc87d3ea4aa9bb25853eeeca3c23', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 750.733326] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255561, 'name': CreateVM_Task, 'duration_secs': 0.710794} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.738807] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.739852] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.739998] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.740362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.740974] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f67aeeb-75bf-468f-ad7b-91e59879e2c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.748063] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 750.748063] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c092a-0d23-7e2c-b7c1-c342c497645e" [ 
750.748063] env[61545]: _type = "Task" [ 750.748063] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.757916] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c092a-0d23-7e2c-b7c1-c342c497645e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.766971] env[61545]: DEBUG nova.compute.manager [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.767221] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.768175] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0ee952-7d21-41db-873b-83eeee75208f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.780320] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.780608] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea138b0b-3d1a-4625-9592-1f7013130f2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.789325] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 750.789325] env[61545]: value = "task-4255564" [ 750.789325] env[61545]: _type = "Task" [ 750.789325] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.799107] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.848570] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updated VIF entry in instance network info cache for port 7e56c9b7-f0cb-41e5-b513-077c74cba86c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.853304] env[61545]: DEBUG nova.network.neutron [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating instance_info_cache with network_info: [{"id": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "address": "fa:16:3e:37:c4:ae", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e56c9b7-f0", "ovs_interfaceid": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.903654] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.917232] env[61545]: DEBUG nova.compute.manager [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.917232] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.917617] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea1806a-bb96-4004-89b6-c4f56586276b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.928348] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255563, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.933204] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.933570] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dedc72bf-5452-48d5-876a-130245af5e25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.940914] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 750.940914] env[61545]: value = "task-4255565" [ 750.940914] env[61545]: _type = "Task" [ 750.940914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.946217] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.962463] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.117651] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc18ffbd-6ad0-43f1-9baa-52aeeb6b40da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.126170] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4a6d22-0d2f-4fc2-ace0-6dc4ee3209d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.161133] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e2f665-de72-4c0b-898e-c9515096307b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.169852] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de43870f-e5f2-495a-a355-c3d283e99548 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.186025] env[61545]: DEBUG nova.compute.provider_tree [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.239098] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Successfully created port: 9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.257494] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c092a-0d23-7e2c-b7c1-c342c497645e, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.258598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.258598] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.258598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.258598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.258790] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.259732] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d86635d-ed55-48b1-8bbe-8a02df38caf2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.268932] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.269132] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.269866] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a49f291-ed91-4088-a5aa-45be189fdc3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.275540] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 751.275540] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5205d4bb-cb3b-552f-bee4-0afe26cef5ba" [ 751.275540] env[61545]: _type = "Task" [ 751.275540] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.284051] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5205d4bb-cb3b-552f-bee4-0afe26cef5ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.298696] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255564, 'name': PowerOffVM_Task, 'duration_secs': 0.219842} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.298964] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.299152] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.299398] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-770b8ce9-5c80-486d-9cc0-31e4f45d0e32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.356549] env[61545]: DEBUG oslo_concurrency.lockutils [req-ce2299aa-170c-4424-861e-ce8138044415 req-6c7b2712-12c9-419e-b7b1-95b6de5e4bcd service nova] Releasing lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.360478] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.360693] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 
tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.360876] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleting the datastore file [datastore2] 79ba6f70-c967-4abf-a2a7-c70046a2602d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.361177] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5b9e229-a92d-49dd-9c4d-c9ce54e48ea2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.369866] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 751.369866] env[61545]: value = "task-4255567" [ 751.369866] env[61545]: _type = "Task" [ 751.369866] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.379180] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.420557] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255563, 'name': ReconfigVM_Task, 'duration_secs': 0.64624} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.421097] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/4b29ebc4-d913-447c-bc57-890953cf8d49.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.422019] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5bf4998-16ad-4672-b5a7-0811214ef42c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.429356] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 751.429356] env[61545]: value = "task-4255568" [ 751.429356] env[61545]: _type = "Task" [ 751.429356] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.438656] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255568, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.450562] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255565, 'name': PowerOffVM_Task, 'duration_secs': 0.218993} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.450833] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.451054] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.451329] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b6ba57b-2de0-42b3-aa6b-9f70209c2568 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.522803] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.523113] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.523317] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleting the datastore file [datastore2] 72656070-cfd0-4104-a9c7-ec20c5a6238a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.523621] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a21b587-5542-473a-b073-e43e385f7057 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.531740] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 751.531740] env[61545]: value = "task-4255570" [ 751.531740] env[61545]: _type = "Task" [ 751.531740] 
env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.541647] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.689494] env[61545]: DEBUG nova.scheduler.client.report [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.788464] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5205d4bb-cb3b-552f-bee4-0afe26cef5ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010428} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.789576] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f293ab3-4533-478c-89e6-ca4b23973e2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.803023] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 751.803023] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c070a3-0212-0655-3488-bc0e9b8fbf93" [ 751.803023] env[61545]: _type = "Task" [ 751.803023] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.810125] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c070a3-0212-0655-3488-bc0e9b8fbf93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.883151] env[61545]: DEBUG oslo_vmware.api [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141612} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.883919] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.884245] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.884514] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.884789] env[61545]: INFO nova.compute.manager [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 751.885141] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.885463] env[61545]: DEBUG nova.compute.manager [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.885709] env[61545]: DEBUG nova.network.neutron [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.941201] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255568, 'name': Rename_Task, 'duration_secs': 0.134281} completed successfully.
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.941201] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.941286] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36fb2f25-7e99-481e-935a-7b1978b388ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.948408] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 751.948408] env[61545]: value = "task-4255571" [ 751.948408] env[61545]: _type = "Task" [ 751.948408] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.960464] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.963612] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255571, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.992172] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=<?>,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-03T12:44:37Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.992424] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.992585] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.992880] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.992961] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.993932] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.994207] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.994385] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545)
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.994710] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.995192] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.995434] env[61545]: DEBUG nova.virt.hardware [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.996530] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e318025f-22aa-4905-8912-37c097732a11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.007222] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac921a18-455a-40cb-a460-4fe9e69a2d65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.042473] env[61545]: DEBUG oslo_vmware.api [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136633} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.042731] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.042960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.043206] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.043335] env[61545]: INFO nova.compute.manager [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 752.043612] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.043813] env[61545]: DEBUG nova.compute.manager [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.043904] env[61545]: DEBUG nova.network.neutron [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.196211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.198923] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.884s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.203543] env[61545]: INFO nova.compute.claims [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.229560] env[61545]: INFO nova.scheduler.client.report [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Deleted allocations for instance 5719daa8-a5bc-4604-b465-a57097695c6d [ 752.312975] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c070a3-0212-0655-3488-bc0e9b8fbf93, 'name': SearchDatastore_Task, 'duration_secs': 0.010282} completed successfully.
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.313266] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.313520] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d7e25ea6-7076-4ab2-aed6-fe5232c2665d/d7e25ea6-7076-4ab2-aed6-fe5232c2665d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.313855] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8916beb-b1c1-496f-b198-88458c0b7c3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.322047] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 752.322047] env[61545]: value = "task-4255572" [ 752.322047] env[61545]: _type = "Task" [ 752.322047] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.335965] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.459274] env[61545]: DEBUG oslo_vmware.api [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255571, 'name': PowerOnVM_Task, 'duration_secs': 0.481099} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.459570] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.459785] env[61545]: INFO nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Took 9.59 seconds to spawn the instance on the hypervisor. 
[ 752.459986] env[61545]: DEBUG nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.460841] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1dba64-ca52-4106-98ad-0a1fde014a88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.479911] env[61545]: DEBUG nova.compute.manager [req-383ac942-2d7a-476b-ab7c-bd1812ab8452 req-092d8acb-5dd4-4c7b-8bc4-f08859ed61ec service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Received event network-vif-deleted-0c7a22d0-b4ff-436d-bb77-faa9c853dd35 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 752.479993] env[61545]: INFO nova.compute.manager [req-383ac942-2d7a-476b-ab7c-bd1812ab8452 req-092d8acb-5dd4-4c7b-8bc4-f08859ed61ec service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Neutron deleted interface 0c7a22d0-b4ff-436d-bb77-faa9c853dd35; detaching it from the instance and deleting it from the info cache [ 752.480210] env[61545]: DEBUG nova.network.neutron [req-383ac942-2d7a-476b-ab7c-bd1812ab8452 req-092d8acb-5dd4-4c7b-8bc4-f08859ed61ec service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.652621] env[61545]: DEBUG nova.compute.manager [req-2bb2d28c-2c6c-4e44-bfe9-e078691113a6 req-206087ab-08dc-434f-ac7e-a430a6804d33 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Received event network-vif-deleted-832392a5-d47c-47ed-9aaa-c361e28e2418 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 752.652805] env[61545]: INFO nova.compute.manager [req-2bb2d28c-2c6c-4e44-bfe9-e078691113a6 req-206087ab-08dc-434f-ac7e-a430a6804d33 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Neutron deleted interface 832392a5-d47c-47ed-9aaa-c361e28e2418; detaching it from the instance and deleting it from the info cache [ 752.652991] env[61545]: DEBUG nova.network.neutron [req-2bb2d28c-2c6c-4e44-bfe9-e078691113a6 req-206087ab-08dc-434f-ac7e-a430a6804d33 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.741109] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42319d63-5862-421a-b972-a15c77116854 tempest-ServersAdmin275Test-675594425 tempest-ServersAdmin275Test-675594425-project-member] Lock "5719daa8-a5bc-4604-b465-a57097695c6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.999s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.835887] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255572, 'name': CopyVirtualDisk_Task} progress is 77%.
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.912092] env[61545]: DEBUG nova.network.neutron [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.990231] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ba01544-c398-4fb0-a7e7-826691ecadd7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.992385] env[61545]: INFO nova.compute.manager [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Took 49.57 seconds to build instance. [ 753.003983] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c6cd39-18c7-461b-b0d5-184cf2013e3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.039900] env[61545]: DEBUG nova.compute.manager [req-383ac942-2d7a-476b-ab7c-bd1812ab8452 req-092d8acb-5dd4-4c7b-8bc4-f08859ed61ec service nova] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Detach interface failed, port_id=0c7a22d0-b4ff-436d-bb77-faa9c853dd35, reason: Instance 72656070-cfd0-4104-a9c7-ec20c5a6238a could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 753.042205] env[61545]: DEBUG nova.network.neutron [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.156117] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c444f2b0-a583-49bd-befd-db76fa75a41c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.166482] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b25000b-d378-4978-ac70-c90a62743caf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.201778] env[61545]: DEBUG nova.compute.manager [req-2bb2d28c-2c6c-4e44-bfe9-e078691113a6 req-206087ab-08dc-434f-ac7e-a430a6804d33 service nova] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Detach interface failed, port_id=832392a5-d47c-47ed-9aaa-c361e28e2418, reason: Instance 79ba6f70-c967-4abf-a2a7-c70046a2602d could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 753.333912] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533956} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.334638] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d7e25ea6-7076-4ab2-aed6-fe5232c2665d/d7e25ea6-7076-4ab2-aed6-fe5232c2665d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.334638] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.334777] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b95f5b58-de64-4689-ac3b-dbb4ae44eb51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.346773] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 753.346773] env[61545]: value = "task-4255573" [ 753.346773] env[61545]: _type = "Task" [ 753.346773] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.357431] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.416463] env[61545]: INFO nova.compute.manager [-] [instance: 72656070-cfd0-4104-a9c7-ec20c5a6238a] Took 1.37 seconds to deallocate network for instance. [ 753.496660] env[61545]: DEBUG oslo_concurrency.lockutils [None req-113fb754-a72a-4f91-b482-a18c046be08f tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 54.851s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.509727] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Successfully updated port: 9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.548282] env[61545]: INFO nova.compute.manager [-] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Took 1.66 seconds to deallocate network for instance.
[ 753.859860] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082441} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.859860] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.860329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c51e4e4-a732-49e1-b3f1-94a14b015e4e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.884592] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] d7e25ea6-7076-4ab2-aed6-fe5232c2665d/d7e25ea6-7076-4ab2-aed6-fe5232c2665d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.887753] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-458aa6d9-10af-4d71-8366-2e5d713bd4e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.912550] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 753.912550] env[61545]: value = "task-4255574" [ 753.912550] env[61545]: _type = "Task" [ 753.912550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.921736] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255574, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.929382] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.983172] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7080aeb-7fff-48cd-90ef-e6f550c9d242 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.990149] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0abe4f-2f95-44d4-8846-59e665ef368b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.025745] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.029541] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.029844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.030117] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.032645] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc37722-64b2-4ef9-ad7b-db165ef7e16e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.042233] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3843ede-577d-4321-bb59-34df327e7374 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.059577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
754.062283] env[61545]: DEBUG nova.compute.provider_tree [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.423561] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255574, 'name': ReconfigVM_Task, 'duration_secs': 0.311072} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.424231] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfigured VM instance instance-0000001a to attach disk [datastore2] d7e25ea6-7076-4ab2-aed6-fe5232c2665d/d7e25ea6-7076-4ab2-aed6-fe5232c2665d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.424923] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76e349c2-de34-435c-951f-bd0a0a136961 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.431417] env[61545]: INFO nova.compute.manager [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Rescuing [ 754.431712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.431978] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.432196] env[61545]: DEBUG nova.network.neutron [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.436347] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 754.436347] env[61545]: value = "task-4255575" [ 754.436347] env[61545]: _type = "Task" [ 754.436347] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.444791] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255575, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.566701] env[61545]: DEBUG nova.scheduler.client.report [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.574033] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.629156] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.935798] env[61545]: DEBUG nova.network.neutron [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [{"id": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "address": "fa:16:3e:e3:1c:91", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc5f1dc-e8", "ovs_interfaceid": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.951381] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255575, 'name': Rename_Task, 'duration_secs': 0.201455} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.951734] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.952037] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d22e829e-6d33-44b0-a536-556f9c6d34d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.960250] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 754.960250] env[61545]: value = "task-4255576" [ 754.960250] env[61545]: _type = "Task" [ 754.960250] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.970013] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255576, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.074487] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.876s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.075016] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 755.078279] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 32.121s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.197433] env[61545]: DEBUG nova.compute.manager [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received event network-vif-plugged-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 755.197649] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.197859] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.198259] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.198529] env[61545]: DEBUG nova.compute.manager [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] No waiting events found dispatching network-vif-plugged-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.198796] env[61545]: WARNING nova.compute.manager [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received unexpected event 
network-vif-plugged-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 for instance with vm_state building and task_state spawning. [ 755.198971] env[61545]: DEBUG nova.compute.manager [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 755.199147] env[61545]: DEBUG nova.compute.manager [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing instance network info cache due to event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 755.199320] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Acquiring lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.312452] env[61545]: DEBUG nova.network.neutron [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Updating instance_info_cache with network_info: [{"id": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "address": "fa:16:3e:0f:50:62", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf3b8dc6-01", "ovs_interfaceid": "af3b8dc6-019b-4076-b26a-f6eaaa30a979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.442228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.442659] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance network_info: |[{"id": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "address": "fa:16:3e:e3:1c:91", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": 
"br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc5f1dc-e8", "ovs_interfaceid": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.443085] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Acquired lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.443307] env[61545]: DEBUG nova.network.neutron [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.444969] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:1c:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cc5f1dc-e836-46e6-8584-5c4d98ba5241', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.458299] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.460765] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.460765] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47415dcc-d7ff-47a5-aeca-9feaa6d8c028 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.489842] env[61545]: DEBUG oslo_vmware.api [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255576, 'name': PowerOnVM_Task, 'duration_secs': 0.507213} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.491490] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.491725] env[61545]: INFO nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Took 9.71 seconds to spawn the instance on the hypervisor. [ 755.492535] env[61545]: DEBUG nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.492535] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.492535] env[61545]: value = "task-4255577" [ 755.492535] env[61545]: _type = "Task" [ 755.492535] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.493163] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0ccc2a-229d-49d4-8f4f-988edc0b0662 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.515945] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255577, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.518238] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "ecf98c79-da3d-44be-9c76-c3fccc688235" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.518238] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.586251] env[61545]: DEBUG nova.compute.utils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.592193] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 755.593325] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.685497] env[61545]: DEBUG nova.policy [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b6c2288f80c4acc9b3fd0c45b003b77', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2aa5551c05f24e018e6b7c73a4310fae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.814018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "refresh_cache-4b29ebc4-d913-447c-bc57-890953cf8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.009080] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255577, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.023872] env[61545]: INFO nova.compute.manager [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Took 46.19 seconds to build instance. [ 756.096862] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.284098] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0329ee-cce4-42b4-b2ab-9e8f642b57c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.294622] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906e7bbd-b65d-41a4-b678-6970ef3c47b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.351234] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f4f70e-2167-407a-a9a6-ec0d10f5a9ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.364562] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fd9d48-2964-4cb7-b607-fb267da9c6cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.388748] env[61545]: DEBUG nova.compute.provider_tree [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.456675] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Successfully created port: 20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.509183] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255577, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.525822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-24b81d36-81cc-4e2e-91cd-01b289d371db tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.742s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.675636] env[61545]: DEBUG nova.network.neutron [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updated VIF entry in instance network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.676070] env[61545]: DEBUG nova.network.neutron [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [{"id": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "address": "fa:16:3e:e3:1c:91", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc5f1dc-e8", "ovs_interfaceid": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.895070] env[61545]: DEBUG nova.scheduler.client.report [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.011588] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255577, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.029711] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.110297] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.142189] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.142453] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.142613] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.142857] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.143438] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.143438] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.143438] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.144064] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.144150] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.145379] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.145379] env[61545]: DEBUG nova.virt.hardware [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.145707] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8d5fd3-b0f4-402b-a6fd-b27a18c0046c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.154949] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321a1d70-84f9-4acd-ad25-785f4aba26b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.179182] env[61545]: DEBUG oslo_concurrency.lockutils [req-39ed1679-0373-4831-915c-45faa6481579 req-e4b0469b-3cbf-4ec6-84a7-fb3af7521a37 service nova] Releasing lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.366864] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 757.367255] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-884f3f8c-5cc7-4c0d-91c1-c8b505f0f8f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.377781] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 
tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 757.377781] env[61545]: value = "task-4255578" [ 757.377781] env[61545]: _type = "Task" [ 757.377781] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.396367] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.513705] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255577, 'name': CreateVM_Task, 'duration_secs': 1.747315} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.513990] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.515241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.515449] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.516197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.516541] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec5680bd-7a35-45df-b541-02823db98bba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.523951] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 757.523951] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cb7af8-86ab-bed0-87f6-98df0758a16e" [ 757.523951] env[61545]: _type = "Task" [ 757.523951] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.536874] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cb7af8-86ab-bed0-87f6-98df0758a16e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.571839] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.889714] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255578, 'name': PowerOffVM_Task, 'duration_secs': 0.204498} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.890035] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.890925] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8d304e-28fa-4128-ac95-ffc91b46659c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.910405] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.832s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.914123] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.671s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.914123] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.914123] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 757.914123] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc 
tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.035s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.915326] env[61545]: INFO nova.compute.claims [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.922212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68884d7-b190-426b-a2b2-94b886b47209 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.922212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b5a294-c447-4ce0-a09b-3422cb3cb841 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.933597] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6528f4d-bea6-46ef-bc65-032a2d9552e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.952924] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb6e41a-0559-4c84-8837-5d488a53ea87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.963608] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7156e22c-3a90-490c-acea-924ddcde3f50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.995919] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179570MB free_disk=245GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 757.995919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.999689] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 758.000387] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7872536d-4683-4d0b-badb-5efd3633a522 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.009662] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 
758.009662] env[61545]: value = "task-4255579" [ 758.009662] env[61545]: _type = "Task" [ 758.009662] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.020319] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255579, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.035859] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cb7af8-86ab-bed0-87f6-98df0758a16e, 'name': SearchDatastore_Task, 'duration_secs': 0.021783} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.036234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.036726] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 758.036844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.036920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.037144] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.037459] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c8c5acd-4fcb-44ef-94a9-a0e5bb56e34d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.048872] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf 
tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.049100] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 758.050080] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fa1d0b3-38c1-4cee-a00b-6f70f7e85f3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.057309] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 758.057309] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279a41e-527c-a83c-1333-d840a42160cf" [ 758.057309] env[61545]: _type = "Task" [ 758.057309] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.066788] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279a41e-527c-a83c-1333-d840a42160cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.121556] env[61545]: DEBUG nova.compute.manager [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Received event network-changed-7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 758.122096] env[61545]: DEBUG nova.compute.manager [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Refreshing instance network info cache due to event network-changed-7e56c9b7-f0cb-41e5-b513-077c74cba86c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 758.124016] env[61545]: DEBUG oslo_concurrency.lockutils [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] Acquiring lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.124016] env[61545]: DEBUG oslo_concurrency.lockutils [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] Acquired lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.124016] env[61545]: DEBUG nova.network.neutron [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Refreshing network info cache for port 7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.506983] env[61545]: INFO nova.scheduler.client.report [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleted allocation for migration 411a6b97-0992-4370-953c-53dc6fad8c98 [ 758.525413] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 758.525530] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 758.525725] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.569738] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279a41e-527c-a83c-1333-d840a42160cf, 'name': SearchDatastore_Task, 'duration_secs': 0.013472} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.570558] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e34f0eb5-11a0-4f19-83da-19d854b956a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.577346] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 758.577346] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d7b02-24fb-63c2-3863-6f8994967689" [ 758.577346] env[61545]: _type = "Task" [ 758.577346] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.588686] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d7b02-24fb-63c2-3863-6f8994967689, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.019524] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e9241f55-eafc-4647-8e99-244c58a9738d tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 39.628s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.036260] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Successfully updated port: 20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 759.090349] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d7b02-24fb-63c2-3863-6f8994967689, 'name': SearchDatastore_Task, 'duration_secs': 0.011661} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.090618] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.090867] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5a284df5-88ea-43bf-9944-ef344f99591c/5a284df5-88ea-43bf-9944-ef344f99591c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.092429] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.092429] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.092429] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac6df7ad-e0e7-42a7-bf44-495fd6010b2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.095796] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1721787c-083c-4ac9-bed4-f46b8170f844 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.104796] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 759.104796] env[61545]: value = "task-4255580" [ 759.104796] env[61545]: _type = "Task" [ 759.104796] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.106092] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.106309] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.111916] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-345b6f5f-fe2a-472b-b897-bcfcb3065c69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.119704] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 759.119704] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52091270-7e03-7592-fe91-f1d2ff66ef56" [ 759.119704] env[61545]: _type = "Task" [ 759.119704] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.123333] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.137143] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52091270-7e03-7592-fe91-f1d2ff66ef56, 'name': SearchDatastore_Task, 'duration_secs': 0.010197} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.137971] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c978d3a-40af-4a76-8804-ca8b6e71babb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.145995] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 759.145995] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14388-0540-6620-5a09-ace59f93e305" [ 759.145995] env[61545]: _type = "Task" [ 759.145995] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.155582] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14388-0540-6620-5a09-ace59f93e305, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.162401] env[61545]: DEBUG nova.network.neutron [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updated VIF entry in instance network info cache for port 7e56c9b7-f0cb-41e5-b513-077c74cba86c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.162755] env[61545]: DEBUG nova.network.neutron [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating instance_info_cache with network_info: [{"id": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "address": "fa:16:3e:37:c4:ae", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e56c9b7-f0", "ovs_interfaceid": "7e56c9b7-f0cb-41e5-b513-077c74cba86c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.542087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.542296] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquired lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.542545] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 759.623848] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519871} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.624014] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5a284df5-88ea-43bf-9944-ef344f99591c/5a284df5-88ea-43bf-9944-ef344f99591c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.624327] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.624616] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77f529c3-3af6-4022-b1d0-16f425afedbd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.636704] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 759.636704] env[61545]: value = "task-4255581" [ 759.636704] env[61545]: _type = "Task" [ 759.636704] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.655022] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.664350] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f14388-0540-6620-5a09-ace59f93e305, 'name': SearchDatastore_Task, 'duration_secs': 0.009883} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.664872] env[61545]: DEBUG oslo_concurrency.lockutils [req-743ff3cb-7cbb-4b47-a276-83cdefef8ee4 req-6b68cf6a-e58b-41df-b4f4-d2891e6147e0 service nova] Releasing lock "refresh_cache-d7e25ea6-7076-4ab2-aed6-fe5232c2665d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.665557] env[61545]: DEBUG oslo_concurrency.lockutils [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.665557] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. {{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 759.665875] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87924872-f79a-433d-9a5d-0b806edcdf4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.675536] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 759.675536] env[61545]: value = "task-4255582" [ 759.675536] env[61545]: _type = "Task" [ 759.675536] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.684839] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255582, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.780301] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12de9847-2eb0-4787-96e3-070583cc3167 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.791339] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd85d87d-7f03-401a-a974-fe1dd12cea22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.833538] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd10027-8b9d-4b96-b5de-100aca831721 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.842239] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a57f41-6c2d-406a-96b1-95823747b4a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.856311] env[61545]: DEBUG nova.compute.provider_tree [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.097833] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.147083] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073492} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.147358] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.148251] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79acd0c9-d073-4a56-a9df-42da096e94a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.153564] env[61545]: DEBUG nova.compute.manager [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Received event network-vif-plugged-20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 760.153831] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Acquiring lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.154552] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.154776] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.154951] env[61545]: DEBUG nova.compute.manager [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] No waiting events found dispatching network-vif-plugged-20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 760.155129] env[61545]: WARNING nova.compute.manager [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Received unexpected event network-vif-plugged-20ad03f5-59d2-4cd0-b053-4d59cc02f11b for instance with vm_state building and task_state spawning. 
[ 760.155298] env[61545]: DEBUG nova.compute.manager [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Received event network-changed-20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 760.155452] env[61545]: DEBUG nova.compute.manager [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Refreshing instance network info cache due to event network-changed-20ad03f5-59d2-4cd0-b053-4d59cc02f11b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 760.155616] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Acquiring lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.174870] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 5a284df5-88ea-43bf-9944-ef344f99591c/5a284df5-88ea-43bf-9944-ef344f99591c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.178316] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-574aa26b-d4d9-4d75-8054-ecfc91d19133 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.205212] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255582, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528801} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.206926] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. [ 760.207352] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 760.207352] env[61545]: value = "task-4255583" [ 760.207352] env[61545]: _type = "Task" [ 760.207352] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.209685] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d646fcc-9ab1-4701-aeb0-9e06e3e5e524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.222524] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255583, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.243644] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.243878] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65e7dc70-c1f3-4873-84a5-70d5f35aa3ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.267374] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 760.267374] env[61545]: value = "task-4255584" [ 760.267374] env[61545]: _type = "Task" [ 760.267374] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.277634] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255584, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.353392] env[61545]: DEBUG nova.network.neutron [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Updating instance_info_cache with network_info: [{"id": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "address": "fa:16:3e:32:0a:d5", "network": {"id": "e951b161-1714-4b9a-a6a1-e3f9e98abb09", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-436630041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aa5551c05f24e018e6b7c73a4310fae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ad03f5-59", "ovs_interfaceid": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.359037] env[61545]: DEBUG nova.scheduler.client.report [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.723401] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255583, 'name': ReconfigVM_Task, 'duration_secs': 0.319452} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.723766] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 5a284df5-88ea-43bf-9944-ef344f99591c/5a284df5-88ea-43bf-9944-ef344f99591c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.724405] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09049320-0b74-4624-aa7c-68a3bc4ebc0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.731296] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 760.731296] env[61545]: value = "task-4255585" [ 760.731296] env[61545]: _type = "Task" [ 760.731296] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.740475] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255585, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.777463] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255584, 'name': ReconfigVM_Task, 'duration_secs': 0.321127} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.777779] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.778906] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d410ebb9-9ef3-4b2c-8bae-1d647718e194 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.804885] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29c3727f-25cb-4963-9ee0-f9ca66259a1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.822028] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 760.822028] env[61545]: value = "task-4255586" [ 760.822028] env[61545]: _type = "Task" [ 760.822028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.831341] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255586, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.856112] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Releasing lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.856475] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Instance network_info: |[{"id": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "address": "fa:16:3e:32:0a:d5", "network": {"id": "e951b161-1714-4b9a-a6a1-e3f9e98abb09", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-436630041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aa5551c05f24e018e6b7c73a4310fae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ad03f5-59", "ovs_interfaceid": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 760.856804] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Acquired lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.857089] env[61545]: DEBUG nova.network.neutron [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Refreshing network info cache for port 20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.858468] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:0a:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bdf594e-da7a-4254-b413-87aef4614588', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20ad03f5-59d2-4cd0-b053-4d59cc02f11b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.866467] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c 
tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Creating folder: Project (2aa5551c05f24e018e6b7c73a4310fae). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.868361] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.954s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.868904] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.872256] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3e612dd-a13c-43b9-aa5c-39a21d4fbbfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.873861] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.727s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.875427] env[61545]: INFO nova.compute.claims [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.890046] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Created folder: Project (2aa5551c05f24e018e6b7c73a4310fae) in parent group-v838542. [ 760.890355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Creating folder: Instances. Parent ref: group-v838621. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.890635] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fda56b4d-1aaa-4f04-9d22-81f29f5b4c3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.901676] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Created folder: Instances in parent group-v838621. 
[ 760.901815] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 760.902043] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.902312] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e89c2cc-0199-4713-bf3f-bb3875f4dce1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.924178] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.924178] env[61545]: value = "task-4255589" [ 760.924178] env[61545]: _type = "Task" [ 760.924178] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.933938] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255589, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.242624] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255585, 'name': Rename_Task, 'duration_secs': 0.140686} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.242904] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.243177] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ae58ed4-a4b9-4ae0-985c-fd1d1b8f01f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.250604] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 761.250604] env[61545]: value = "task-4255590" [ 761.250604] env[61545]: _type = "Task" [ 761.250604] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.259480] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.332364] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255586, 'name': ReconfigVM_Task, 'duration_secs': 0.165865} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.332679] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.332934] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ef15152-3e26-4fc0-98ed-732a2bbb0fd1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.339752] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 761.339752] env[61545]: value = "task-4255591" [ 761.339752] env[61545]: _type = "Task" [ 761.339752] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.347957] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.375322] env[61545]: DEBUG nova.compute.utils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.376819] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.376991] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.437191] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255589, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.501180] env[61545]: DEBUG nova.policy [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b71cb716e8bb49c381c527435cf10cd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd896bed92af4c2b83dc71adedda9c6e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.522761] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "dad53420-37f1-42ef-b0d3-e35c73b97417" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.523100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.732882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.733218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.733531] env[61545]: INFO nova.compute.manager [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Rebooting instance [ 761.763864] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255590, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.853080] env[61545]: DEBUG oslo_vmware.api [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255591, 'name': PowerOnVM_Task, 'duration_secs': 0.478286} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.853381] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.856602] env[61545]: DEBUG nova.compute.manager [None req-60c83fb9-7204-432f-ac1e-8869020eb3d3 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.857488] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f030a5-c41b-492d-be7f-c903a33ff28b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.880522] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 761.918913] env[61545]: DEBUG nova.network.neutron [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Updated VIF entry in instance network info cache for port 20ad03f5-59d2-4cd0-b053-4d59cc02f11b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.919418] env[61545]: DEBUG nova.network.neutron [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Updating instance_info_cache with network_info: [{"id": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "address": "fa:16:3e:32:0a:d5", "network": {"id": "e951b161-1714-4b9a-a6a1-e3f9e98abb09", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-436630041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aa5551c05f24e018e6b7c73a4310fae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bdf594e-da7a-4254-b413-87aef4614588", "external-id": "nsx-vlan-transportzone-422", "segmentation_id": 422, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ad03f5-59", "ovs_interfaceid": "20ad03f5-59d2-4cd0-b053-4d59cc02f11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.944062] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255589, 'name': CreateVM_Task, 'duration_secs': 0.767966} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.944062] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.944474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.944474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.944755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.947778] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8ae664e-5856-4674-a46e-2991a81ba7a5 {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.953582] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 761.953582] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d520af-1f08-2c22-b6a1-a16677ca8201" [ 761.953582] env[61545]: _type = "Task" [ 761.953582] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.963419] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d520af-1f08-2c22-b6a1-a16677ca8201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.120844] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Successfully created port: 25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.266010] env[61545]: DEBUG oslo_vmware.api [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255590, 'name': PowerOnVM_Task, 'duration_secs': 0.631217} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.266332] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.266511] env[61545]: INFO nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 10.31 seconds to spawn the instance on the hypervisor. 
[ 762.266697] env[61545]: DEBUG nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.267519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68221b1f-2087-47cf-8517-ea9f2a519362 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.273683] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.273683] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.273683] env[61545]: DEBUG nova.network.neutron [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.421768] env[61545]: DEBUG oslo_concurrency.lockutils [req-576de104-fd42-4640-99b0-42ce0e814e1d req-5f39a47c-4a53-439b-8498-3f8e61d41376 service nova] Releasing lock "refresh_cache-1722d63d-e604-44fe-8198-13e6c5bce016" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.467527] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d520af-1f08-2c22-b6a1-a16677ca8201, 'name': SearchDatastore_Task, 'duration_secs': 0.010802} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.467835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.468087] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.468330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.468476] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.468653] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.468945] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e766ed69-df31-47fe-a8d6-66dd29f0903d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.483257] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.483464] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 762.484269] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48537338-931a-4202-b8db-c8bca80f7e67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.494333] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 762.494333] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ae871-be8d-21f5-2925-f3f0c970fdd4" [ 762.494333] env[61545]: _type = "Task" [ 762.494333] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.506296] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ae871-be8d-21f5-2925-f3f0c970fdd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.615567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec56620-482c-4f6d-88e5-ad8b6d2f9709 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.624888] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6473c9-1f95-4572-bcca-1980bea1fb25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.662556] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a72a91-553f-49b3-be20-17bb87554fb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.675262] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb14cce-6222-43b9-9159-b4a5f0c0f00a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.687374] env[61545]: DEBUG nova.compute.provider_tree [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.796427] env[61545]: INFO nova.compute.manager [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 47.77 seconds to build instance. [ 762.889950] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 762.915634] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 762.915893] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.916076] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 762.916261] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.916405] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 762.916551] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 762.916762] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 762.916923] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 762.917606] env[61545]: DEBUG nova.virt.hardware [None 
req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 762.917816] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 762.918008] env[61545]: DEBUG nova.virt.hardware [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 762.918931] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a0f6d1-8048-457f-af35-e6cbc8692c7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.927743] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603e4c08-fa4d-40a1-a592-f3ea620bf454 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.005479] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ae871-be8d-21f5-2925-f3f0c970fdd4, 'name': SearchDatastore_Task, 'duration_secs': 0.009871} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.006403] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-567656c3-a990-40f2-9b84-01154b6deeac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.013430] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 763.013430] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208af5e-34c0-4057-ec1f-324383bc93db" [ 763.013430] env[61545]: _type = "Task" [ 763.013430] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.022665] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208af5e-34c0-4057-ec1f-324383bc93db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.153436] env[61545]: DEBUG nova.network.neutron [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.193531] env[61545]: DEBUG nova.scheduler.client.report [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.298040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1e1d906-4564-4cfb-b8d5-81addcdcabdf tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.816s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.526705] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208af5e-34c0-4057-ec1f-324383bc93db, 'name': SearchDatastore_Task, 'duration_secs': 0.009209} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.527030] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.527304] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1722d63d-e604-44fe-8198-13e6c5bce016/1722d63d-e604-44fe-8198-13e6c5bce016.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.527589] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6b24920-d739-47ae-8944-2f8c070fd00a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.536345] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 763.536345] env[61545]: value = "task-4255592" [ 763.536345] env[61545]: _type = "Task" [ 763.536345] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.547969] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.656615] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.702177] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.702177] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.704467] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.463s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.713117] env[61545]: INFO nova.compute.claims [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.802206] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.914885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.914885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.050738] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255592, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.165074] env[61545]: DEBUG nova.compute.manager [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.165074] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80064ab6-79cd-4640-8c43-a12bb4ec17f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.220364] env[61545]: DEBUG nova.compute.utils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 764.221753] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 764.222092] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.326168] env[61545]: DEBUG nova.policy [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a76971360f074d398d059dbcb9ada6ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae60c9c1b7804134b570d0384dc85ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.334464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.550553] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526602} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.553204] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1722d63d-e604-44fe-8198-13e6c5bce016/1722d63d-e604-44fe-8198-13e6c5bce016.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.553204] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.553204] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b56f61c-1bcb-42b7-bf41-8885b268ba3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.562623] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 764.562623] env[61545]: value = "task-4255593" [ 764.562623] env[61545]: _type = "Task" [ 764.562623] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.579575] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.644891] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Successfully updated port: 25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.729074] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.058636] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Successfully created port: d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.074833] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079001} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.075118] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.075998] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28ffa55-38b8-4fa5-bf91-72263d51cb95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.100159] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 1722d63d-e604-44fe-8198-13e6c5bce016/1722d63d-e604-44fe-8198-13e6c5bce016.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.103054] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be62db85-829e-4f74-b69f-81c40f9c019f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.125149] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 765.125149] env[61545]: value = "task-4255594" [ 765.125149] env[61545]: _type = "Task" [ 765.125149] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.134895] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.148131] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.148319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquired lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.148501] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.180808] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81cd8e4-5458-4ded-9c46-0f0f17d5b09c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.193402] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Doing hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 765.194016] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b614769c-64ef-43af-aebb-fa9b8ce109ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.203305] env[61545]: DEBUG oslo_vmware.api [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 765.203305] env[61545]: value = "task-4255595" [ 765.203305] env[61545]: _type = "Task" [ 765.203305] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.213193] env[61545]: DEBUG oslo_vmware.api [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255595, 'name': ResetVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.237872] env[61545]: DEBUG nova.compute.manager [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Received event network-vif-plugged-25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 765.238160] env[61545]: DEBUG oslo_concurrency.lockutils [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] Acquiring lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.238395] env[61545]: DEBUG oslo_concurrency.lockutils [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.238567] env[61545]: DEBUG oslo_concurrency.lockutils [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.238739] env[61545]: DEBUG nova.compute.manager [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] No waiting events found dispatching network-vif-plugged-25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.238957] env[61545]: WARNING nova.compute.manager [req-0dff9399-86b3-42b9-a823-4e6c838c8589 req-0d267b88-8262-4197-bc44-f998b5440c3f service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Received unexpected event network-vif-plugged-25571e3a-347e-4c8e-82cc-0f636f3bca8d for instance with vm_state building and task_state spawning. 
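The repeated "Acquiring lock ... acquired ... released" entries are oslo.concurrency named locks. A minimal sketch of that pattern, reusing the per-instance event lock name seen in the log (the work inside the block is illustrative, not Nova's actual event-dispatch code):

```python
# Sketch only: the oslo.concurrency named-lock pattern behind the
# lockutils DEBUG lines above. The lock name mirrors the
# "<instance-uuid>-events" lock from this log.
from oslo_concurrency import lockutils

instance_uuid = '2a0576f9-d740-4dfa-9783-17eb3987840b'  # from the log

with lockutils.lock(f'{instance_uuid}-events'):
    # While the lock is held, waiting external events for this instance
    # (e.g. network-vif-plugged) would be popped and dispatched; the DEBUG
    # lines record how long the lock was waited for and held.
    pass
```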
[ 765.394959] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66d5edc-cc34-4bd6-980a-8b20f6e5b88c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.403959] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e89fc5d-1dc7-47ab-a832-3c60e66d1976 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.443292] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c9e0b5-b0f2-4200-a819-d4c3aef06a09 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.453277] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2e7070-861b-4d25-b952-923f1784ac30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.467835] env[61545]: DEBUG nova.compute.provider_tree [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.636322] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255594, 'name': ReconfigVM_Task, 'duration_secs': 0.353126} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.636624] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 1722d63d-e604-44fe-8198-13e6c5bce016/1722d63d-e604-44fe-8198-13e6c5bce016.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.637325] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0a2ac69-5976-48f3-a72c-b6f684c6cc65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.644947] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 765.644947] env[61545]: value = "task-4255596" [ 765.644947] env[61545]: _type = "Task" [ 765.644947] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.654695] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255596, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.695446] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.713662] env[61545]: DEBUG oslo_vmware.api [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255595, 'name': ResetVM_Task, 'duration_secs': 0.104819} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.714143] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Did hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 765.714382] env[61545]: DEBUG nova.compute.manager [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 765.715389] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2c7ad4-d893-4b0d-873f-46b4ad7888fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.740993] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.778354] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.778684] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.778971] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.779159] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.779377] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.779590] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.779854] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.780143] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.780328] env[61545]: DEBUG nova.virt.hardware [None 
req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.780844] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.781224] env[61545]: DEBUG nova.virt.hardware [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.782538] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8c279f-ec5f-4d3b-88c9-448deb810f42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.791516] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3ddb27-42d7-4333-9274-1bf2a5401203 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.892702] env[61545]: DEBUG nova.network.neutron [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Updating instance_info_cache with network_info: [{"id": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "address": "fa:16:3e:66:06:d1", "network": {"id": "075e9a39-c48b-4639-ae0f-3c6221edbe84", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-606362329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd896bed92af4c2b83dc71adedda9c6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25571e3a-34", "ovs_interfaceid": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.971129] env[61545]: DEBUG nova.scheduler.client.report [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.159186] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255596, 'name': Rename_Task, 'duration_secs': 0.149016} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.159805] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.160131] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96b317cf-aa81-491f-9e9c-a7cd6c59bfee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.168308] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 766.168308] env[61545]: value = "task-4255597" [ 766.168308] env[61545]: _type = "Task" [ 766.168308] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.181565] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255597, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.236257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1c658b7e-a5ca-469a-a3c3-034ec6d6fdf7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.503s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.396705] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Releasing lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.397115] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Instance network_info: |[{"id": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "address": "fa:16:3e:66:06:d1", "network": {"id": "075e9a39-c48b-4639-ae0f-3c6221edbe84", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-606362329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd896bed92af4c2b83dc71adedda9c6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25571e3a-34", "ovs_interfaceid": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.398099] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:06:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25571e3a-347e-4c8e-82cc-0f636f3bca8d', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.408515] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Creating folder: Project (bd896bed92af4c2b83dc71adedda9c6e). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.409050] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cbe67dbb-d6e9-4aea-a29f-f61f2d889570 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.423696] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Created folder: Project (bd896bed92af4c2b83dc71adedda9c6e) in parent group-v838542. [ 766.423983] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Creating folder: Instances. Parent ref: group-v838624. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.424251] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-033d6a5a-704d-4e3a-8a0f-65b3cc23562f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.437051] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Created folder: Instances in parent group-v838624. [ 766.437374] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.437567] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.437765] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cc0aef1-acf5-4f3f-a245-ec7397b84b5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.457823] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.457823] env[61545]: value = "task-4255600" [ 766.457823] env[61545]: _type = "Task" [ 766.457823] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.466779] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255600, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.478331] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.478712] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 766.481600] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.461s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.484210] env[61545]: INFO nova.compute.claims [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.684690] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255597, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.971801] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255600, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.988940] env[61545]: DEBUG nova.compute.utils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 766.992810] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 766.993039] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.183843] env[61545]: DEBUG oslo_vmware.api [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255597, 'name': PowerOnVM_Task, 'duration_secs': 0.526686} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.185456] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.185456] env[61545]: INFO nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Took 10.07 seconds to spawn the instance on the hypervisor. [ 767.185456] env[61545]: DEBUG nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.185456] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b5340-280e-4e05-bf3c-8fce0ac7c561 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.190290] env[61545]: DEBUG nova.policy [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c752932387242eaac2e453dc8d8b7cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '723b7377d6c8413cb6ef900c8404b0b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.469128] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255600, 'name': CreateVM_Task, 'duration_secs': 0.584882} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.469455] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.470097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.470438] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.470590] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.470859] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-640c42c9-fe0a-4a1e-a619-43123f5e0426 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.476123] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 767.476123] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d6c12d-0f23-6140-0c61-d565f0f4bea2" [ 767.476123] env[61545]: _type = "Task" [ 767.476123] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.484545] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d6c12d-0f23-6140-0c61-d565f0f4bea2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.496938] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 767.624262] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Successfully updated port: d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.709592] env[61545]: INFO nova.compute.manager [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Took 49.42 seconds to build instance. [ 767.981108] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Successfully created port: 1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.995284] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d6c12d-0f23-6140-0c61-d565f0f4bea2, 'name': SearchDatastore_Task, 'duration_secs': 0.015664} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.995284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.995284] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 767.995284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.995480] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.995480] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 767.995480] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4ea4da7-0113-4919-bd92-daf0b3440214 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.010615] env[61545]: INFO nova.virt.block_device [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Booting with volume 7578d319-27d0-4d5d-99aa-9bce7818396d at /dev/sda [ 768.011944] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.012736] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.013631] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-114bd154-1a91-41a5-86e3-15354a276c9b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.022733] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 768.022733] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52360a1f-d701-aab0-59cb-cc92dfd1e62d" [ 768.022733] env[61545]: _type = "Task" [ 768.022733] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.035291] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52360a1f-d701-aab0-59cb-cc92dfd1e62d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.056851] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65922fab-8e2a-4cf1-a51c-c501c32d9e43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.069473] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0023c42-3eee-4f47-a7a7-9ae41d3fd1f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.108415] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-208b0334-d2ec-4d47-80fb-7cffbeb5e298 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.118831] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1a0eb6-f0ae-4ec1-8996-9b122d20e7fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.132470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.132611] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.132767] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.158619] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206ac926-09a6-4e9c-8268-f7e8762cf4a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.168935] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925d0900-3e19-48b3-b563-12025e11a1e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.185301] env[61545]: DEBUG nova.virt.block_device [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updating existing volume attachment record: b918790a-2a16-42e2-8fa7-40b79e529b95 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 768.217414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-97bcde41-d337-4b74-877a-241b6dc8050c tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.319s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.258118] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2944dd2a-3342-4ccc-9fa6-5df82d09b328 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.266566] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c65b69-3fd0-4c1c-890e-7e7bbbc4cd3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.302925] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2dc9f9-14cc-4e6b-acfb-603811c28cfe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.311830] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bf7060-0a4d-4fdc-8bb5-6345e4f0cef5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.328433] env[61545]: DEBUG nova.compute.provider_tree [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.520509] env[61545]: DEBUG nova.compute.manager [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Received event network-changed-25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 768.520509] env[61545]: DEBUG nova.compute.manager [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Refreshing instance network info cache due to event network-changed-25571e3a-347e-4c8e-82cc-0f636f3bca8d. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 768.520509] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Acquiring lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.520509] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Acquired lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.520509] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Refreshing network info cache for port 25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 768.539274] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52360a1f-d701-aab0-59cb-cc92dfd1e62d, 'name': SearchDatastore_Task, 'duration_secs': 0.024765} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.540594] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ffc917b-528f-40d9-9f78-c8c68ff58f1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.549389] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 768.549389] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5223935e-fcd1-eb6c-45ac-76835024a560" [ 768.549389] env[61545]: _type = "Task" [ 768.549389] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.565971] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5223935e-fcd1-eb6c-45ac-76835024a560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.687561] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.720627] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 768.835229] env[61545]: DEBUG nova.scheduler.client.report [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.928518] env[61545]: DEBUG nova.network.neutron [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Updating instance_info_cache with network_info: [{"id": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "address": "fa:16:3e:27:ab:48", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd289bc06-c7", "ovs_interfaceid": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.063143] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5223935e-fcd1-eb6c-45ac-76835024a560, 'name': SearchDatastore_Task, 'duration_secs': 0.021445} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.066277] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.066608] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2a0576f9-d740-4dfa-9783-17eb3987840b/2a0576f9-d740-4dfa-9783-17eb3987840b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.066981] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d82964dd-9278-4b9d-8a3d-66780ba36324 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.074548] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 769.074548] env[61545]: value = "task-4255601" [ 769.074548] env[61545]: _type = "Task" [ 769.074548] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.083344] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.256625] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.343270] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.859s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.343270] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 769.351027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.475s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.354873] env[61545]: DEBUG nova.objects.instance [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lazy-loading 'resources' on Instance uuid c1b1ac1a-32da-442d-86ef-d754165f5a81 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 769.391643] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Updated VIF entry in instance network info cache for port 25571e3a-347e-4c8e-82cc-0f636f3bca8d. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.392237] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Updating instance_info_cache with network_info: [{"id": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "address": "fa:16:3e:66:06:d1", "network": {"id": "075e9a39-c48b-4639-ae0f-3c6221edbe84", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-606362329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd896bed92af4c2b83dc71adedda9c6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25571e3a-34", "ovs_interfaceid": "25571e3a-347e-4c8e-82cc-0f636f3bca8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.431432] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.432770] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance network_info: |[{"id": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "address": "fa:16:3e:27:ab:48", "network": {"id": 
"97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd289bc06-c7", "ovs_interfaceid": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.433380] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:ab:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd289bc06-c77a-460a-b15d-e94dcfb3ff53', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.444169] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating folder: Project (ae60c9c1b7804134b570d0384dc85ea5). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.444946] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-543c5147-bb55-4b15-9ef6-3d1ced0ce55c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.461027] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created folder: Project (ae60c9c1b7804134b570d0384dc85ea5) in parent group-v838542. [ 769.461027] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating folder: Instances. Parent ref: group-v838627. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.461027] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aacde4d2-29ad-453e-bcc9-525054c1e655 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.473369] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created folder: Instances in parent group-v838627. [ 769.474119] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.474119] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.474284] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-879e5ce2-ee8c-44a7-bb35-ec965e2b60a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.503164] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.503164] env[61545]: value = "task-4255604" [ 769.503164] env[61545]: _type = "Task" [ 769.503164] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.512957] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255604, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.591169] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255601, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.858771] env[61545]: DEBUG nova.compute.utils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.868912] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 769.868912] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.894903] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Releasing lock "refresh_cache-2a0576f9-d740-4dfa-9783-17eb3987840b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.895185] env[61545]: DEBUG nova.compute.manager [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 769.895361] env[61545]: DEBUG nova.compute.manager [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing instance network info cache due to event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 769.895575] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.895713] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.895916] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.973759] env[61545]: DEBUG nova.policy [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a76971360f074d398d059dbcb9ada6ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae60c9c1b7804134b570d0384dc85ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.015168] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255604, 'name': CreateVM_Task, 'duration_secs': 0.387108} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.015343] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.016077] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.016253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.016563] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.016840] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ed7344-b668-4508-9524-365911d95f51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.022296] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 770.022296] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6e043-7e0f-abab-ef05-b8cea14cf35b" [ 770.022296] env[61545]: _type = "Task" [ 770.022296] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.034083] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6e043-7e0f-abab-ef05-b8cea14cf35b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.087570] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536213} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.087831] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2a0576f9-d740-4dfa-9783-17eb3987840b/2a0576f9-d740-4dfa-9783-17eb3987840b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.088062] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.088343] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17fba924-ac97-436e-b746-1fb44d40c58e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.095577] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 770.095577] env[61545]: value = "task-4255605" [ 770.095577] env[61545]: _type = "Task" [ 770.095577] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.104870] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255605, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.359183] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Successfully updated port: 1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.364750] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 770.370293] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.370823] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.371058] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.371259] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.371516] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.371676] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.371823] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.372296] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.372509] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.372691] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] 
Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.372858] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.373049] env[61545]: DEBUG nova.virt.hardware [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.374322] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a206622-02be-4587-82da-f4075f5aa98e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.386613] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6c39d1-589b-492e-992c-4d9d2946ef31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.530975] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37448d0f-9cc2-4985-9e4a-f943fb9ca8cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.539615] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6e043-7e0f-abab-ef05-b8cea14cf35b, 'name': SearchDatastore_Task, 'duration_secs': 0.059104} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.539615] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.539840] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.539933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.540158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.540464] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.540593] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b544f0f4-727c-4aa5-bc9e-c45ffda4d662 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.546251] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aaecde2-47c9-4319-a4d0-861021d71984 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.579810] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.580125] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.580887] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a894abf6-201a-4559-ac66-3f44233a7686 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.583990] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6a9e51-80dd-4f6d-8663-cf04631cfe7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.590997] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 770.590997] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52740bc4-b384-0863-b739-cc357533544d" [ 770.590997] env[61545]: _type = "Task" [ 770.590997] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.597542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9329c4-ebcd-4ba5-bcb6-871f757ad983 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.614568] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52740bc4-b384-0863-b739-cc357533544d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.625799] env[61545]: DEBUG nova.compute.provider_tree [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.627361] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184911} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.627635] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.628677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee565e1-b42d-4268-95dc-55b493f559fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.652414] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 2a0576f9-d740-4dfa-9783-17eb3987840b/2a0576f9-d740-4dfa-9783-17eb3987840b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.655952] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88c17629-832d-4610-904c-80259da2d25b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.677229] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 770.677229] env[61545]: value = "task-4255606" [ 770.677229] env[61545]: _type = "Task" [ 770.677229] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.686191] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255606, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.864586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.864799] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquired lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.864911] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.105786] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52740bc4-b384-0863-b739-cc357533544d, 'name': SearchDatastore_Task, 'duration_secs': 0.053876} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.106726] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46f96b23-58c5-450c-ba72-dd7a3a28e91a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.112550] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 771.112550] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00e2-b642-e7f9-8adb-579c1d1f8b52" [ 771.112550] env[61545]: _type = "Task" [ 771.112550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.123430] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00e2-b642-e7f9-8adb-579c1d1f8b52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.129905] env[61545]: DEBUG nova.scheduler.client.report [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.163050] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Successfully created port: 71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.188142] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.233673] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updated VIF entry in instance network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 771.234062] env[61545]: DEBUG nova.network.neutron [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.380164] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.416306] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.417667] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.417878] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.418094] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.418734] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.418935] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 771.419332] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.419403] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.419519] env[61545]: DEBUG nova.virt.hardware [None 
req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.419679] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.419850] env[61545]: DEBUG nova.virt.hardware [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.420777] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807a6b4d-0ac7-4396-a7c6-e16f7c5b83cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.430039] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39146c94-8d44-4ece-a9ad-d163e9b0516b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.464721] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.623944] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00e2-b642-e7f9-8adb-579c1d1f8b52, 'name': SearchDatastore_Task, 'duration_secs': 0.02463} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.624311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.624503] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.624772] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fee2946-21d9-4025-b0e3-43f227f31376 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.633179] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 771.633179] env[61545]: value = "task-4255607" [ 771.633179] env[61545]: _type = "Task" [ 771.633179] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.637394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.286s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.644770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.876s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.644770] env[61545]: DEBUG nova.objects.instance [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lazy-loading 'resources' on Instance uuid 9dbff26a-210c-4e80-812f-c91debe3e9c1 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.651062] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255607, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.673055] env[61545]: INFO nova.scheduler.client.report [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Deleted allocations for instance c1b1ac1a-32da-442d-86ef-d754165f5a81 [ 771.689341] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255606, 'name': ReconfigVM_Task, 'duration_secs': 0.725128} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.689748] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 2a0576f9-d740-4dfa-9783-17eb3987840b/2a0576f9-d740-4dfa-9783-17eb3987840b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.690296] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e743a70-eccd-4a87-9f67-8f1e7eaba28b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.698116] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 771.698116] env[61545]: value = "task-4255608" [ 771.698116] env[61545]: _type = "Task" [ 771.698116] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.707745] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255608, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.737655] env[61545]: DEBUG oslo_concurrency.lockutils [req-37bea13a-7af4-4630-8f83-04096ce0d6d6 req-f8e937c8-0ad8-4255-ad40-88ccede0350e service nova] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.790319] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Received event network-vif-plugged-d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 771.790319] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.790319] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.790460] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.790711] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] No waiting events found dispatching network-vif-plugged-d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 771.790990] env[61545]: WARNING nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Received unexpected event network-vif-plugged-d289bc06-c77a-460a-b15d-e94dcfb3ff53 for instance with vm_state building and task_state spawning. [ 771.791192] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 771.791383] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing instance network info cache due to event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 771.791616] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.792014] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.792465] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.826930] env[61545]: DEBUG nova.network.neutron [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updating instance_info_cache with network_info: [{"id": "1d429567-3342-4107-b5f0-2596a7002020", "address": "fa:16:3e:67:8f:5d", "network": {"id": "1d9a30ee-02cb-4684-9776-a725dcfd5617", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-857592221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "723b7377d6c8413cb6ef900c8404b0b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d429567-33", "ovs_interfaceid": "1d429567-3342-4107-b5f0-2596a7002020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.144418] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255607, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.191168] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43583ea1-278f-48fa-b51d-b2263b84eca5 tempest-DeleteServersAdminTestJSON-1595240936 tempest-DeleteServersAdminTestJSON-1595240936-project-member] Lock "c1b1ac1a-32da-442d-86ef-d754165f5a81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.647s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.212476] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255608, 'name': Rename_Task, 'duration_secs': 0.141938} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.213676] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 772.214073] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8af252bd-45f7-4c94-a9a4-8a5ae05a8655 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.224504] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 772.224504] env[61545]: value = "task-4255609" [ 772.224504] env[61545]: _type = "Task" [ 772.224504] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.234136] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255609, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.330927] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Releasing lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.331247] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance network_info: |[{"id": "1d429567-3342-4107-b5f0-2596a7002020", "address": "fa:16:3e:67:8f:5d", "network": {"id": "1d9a30ee-02cb-4684-9776-a725dcfd5617", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-857592221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "723b7377d6c8413cb6ef900c8404b0b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d429567-33", "ovs_interfaceid": "1d429567-3342-4107-b5f0-2596a7002020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.334588] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:8f:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf99dce-c773-48db-a2d9-00b8d0a7c75d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d429567-3342-4107-b5f0-2596a7002020', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.342284] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Creating folder: Project (723b7377d6c8413cb6ef900c8404b0b5). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.343574] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47fa20e7-5762-44e0-853f-6efbeacf1ffd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.361984] env[61545]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
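The suds WARNING above is benign: the SOAP fault it reports is DuplicateName, and the driver resolves it by reusing the already-existing Project folder ("Folder already exists"), as the next entries show. Below is a minimal Python sketch of that idempotent-create pattern. The invoke_api() call style and the DuplicateName exception class follow oslo.vmware conventions, but the exact names and the lookup callable are assumptions for illustration, not the actual nova.virt.vmwareapi.vm_util code.

    # Illustrative sketch only -- not the driver's real folder-creation code.
    from oslo_vmware import exceptions as vexc  # DuplicateName class name assumed


    def create_folder_idempotent(session, parent_folder, name, lookup_existing):
        """Create a vCenter folder, reusing it if the name is already taken.

        `lookup_existing` is a caller-supplied callable (hypothetical here)
        that returns the existing folder reference, e.g. by walking the
        parent's childEntity list.
        """
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_folder, name=name)
        except vexc.DuplicateName:
            # vCenter answered the SOAP call with a DuplicateName fault
            # (logged by suds as an "internal server error" despite HTTP 200);
            # treat it as "already created" and reuse the existing folder.
            return lookup_existing(parent_folder, name)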
[ 772.361984] env[61545]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61545) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 772.362393] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Folder already exists: Project (723b7377d6c8413cb6ef900c8404b0b5). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 772.362393] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Creating folder: Instances. Parent ref: group-v838582. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.362647] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cc267af-20c1-4c0d-afb1-ca81bbac8fc8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.379310] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Created folder: Instances in parent group-v838582. [ 772.379310] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.379310] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.379310] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd629489-42e0-49a8-a44c-144d678210e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.409449] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.409449] env[61545]: value = "task-4255612" [ 772.409449] env[61545]: _type = "Task" [ 772.409449] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.425279] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255612, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.652879] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577633} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.653394] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.653924] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.654815] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b9e13ae-a507-4510-ae74-f4b3d4c1c642 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.664695] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 772.664695] env[61545]: value = "task-4255613" [ 772.664695] env[61545]: _type = "Task" [ 772.664695] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.681245] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255613, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.742427] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255609, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.771964] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updated VIF entry in instance network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.772343] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.806152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.806487] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.806715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.806905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.807137] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.810290] env[61545]: INFO nova.compute.manager [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Terminating instance [ 772.897356] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35515cb1-c95d-401d-acb7-9e01d1a7d797 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.908074] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5425b5e6-cd07-4ed2-a450-0d1f5ba799c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.922133] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255612, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.948580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8641a83-25cd-4381-8d67-0684d6cc2649 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.956944] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be19848a-8808-4cf5-b29c-254011d59a37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.971709] env[61545]: DEBUG nova.compute.provider_tree [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.177838] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255613, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079493} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.178161] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.179094] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d7538e-1238-4581-bab5-a5ce1312f671 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.205127] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.205127] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b099746d-108f-45f5-be0f-91133492a56c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.224962] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 773.224962] env[61545]: value = "task-4255614" [ 773.224962] env[61545]: _type = "Task" [ 773.224962] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.240294] env[61545]: DEBUG oslo_vmware.api [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255609, 'name': PowerOnVM_Task, 'duration_secs': 0.661004} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.242249] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.242485] env[61545]: INFO nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Took 10.35 seconds to spawn the instance on the hypervisor. 
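The recurring "Task: {...} progress is N%" entries followed by "completed successfully" come from oslo.vmware polling each vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) until it reaches a terminal state. A rough sketch of that poll-until-done loop is shown below; it is a simplification under the assumption of a generic get_task_info() accessor, not the library's actual wait_for_task implementation.

    import time


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it succeeds or fails (simplified sketch)."""
        while True:
            info = get_task_info()  # assumed accessor returning a TaskInfo-like object
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # the real code maps the fault to a specific exception type
                raise RuntimeError(info.error)
            # 'queued' or 'running': report progress and poll again, as in the
            # "_poll_task ... progress is N%" lines above
            print('progress is %s%%' % (info.progress or 0))
            time.sleep(poll_interval)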
[ 773.242676] env[61545]: DEBUG nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.246392] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255614, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.246392] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e709284-d3bd-4d1b-b999-446ea7e40c21 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.276044] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.276644] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 773.277725] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing instance network info cache due to event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 773.277725] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.277725] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquired lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.277725] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.318334] env[61545]: DEBUG nova.compute.manager [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 773.319055] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.323519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437f986e-6952-4f91-9883-ca3be0b29cea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.333184] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.333418] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8deb0df4-af46-4314-9996-cf7241fa30a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.342180] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 773.342180] env[61545]: value = "task-4255615" [ 773.342180] env[61545]: _type = "Task" [ 773.342180] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.353701] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.423619] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255612, 'name': CreateVM_Task, 'duration_secs': 0.538307} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.423835] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.427392] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'attachment_id': 'b918790a-2a16-42e2-8fa7-40b79e529b95', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838592', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'name': 'volume-7578d319-27d0-4d5d-99aa-9bce7818396d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '26e339f1-182b-4f00-b7c2-a2a32e942d04', 'attached_at': '', 'detached_at': '', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'serial': '7578d319-27d0-4d5d-99aa-9bce7818396d'}, 'guest_format': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=61545) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 773.427392] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Root volume attach. Driver type: vmdk {{(pid=61545) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 773.427392] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a1ccbc-d8d3-4b93-bd97-781957bd983e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.438299] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7c6991-c83e-4cb6-b40f-eb2a86ee9d94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.447222] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c796278-fb43-409c-8d98-638dd61da735 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.457510] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-66e25371-ce6b-4f58-8a2d-af94f1cf3e73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.467315] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 773.467315] env[61545]: value = "task-4255616" [ 773.467315] env[61545]: _type = "Task" [ 773.467315] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.481984] env[61545]: DEBUG nova.scheduler.client.report [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.485043] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.647594] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Successfully updated port: 71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.743876] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255614, 'name': ReconfigVM_Task, 'duration_secs': 0.418064} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.744317] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 773.745440] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f60a5eca-40cc-401d-88eb-2514c8c7fe35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.761254] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 773.761254] env[61545]: value = "task-4255617" [ 773.761254] env[61545]: _type = "Task" [ 773.761254] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.769343] env[61545]: INFO nova.compute.manager [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Took 44.91 seconds to build instance. 
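The repeated "Task: {'id': task-4255615, 'name': PowerOffVM_Task} progress is 0%" entries above come from oslo.vmware polling a vCenter task at a fixed interval until it reaches a terminal state. Below is a minimal sketch of that polling pattern using oslo_service.loopingcall, the same mechanism the "oslo.service.backend.eventlet.loopingcall" entries refer to; fetch_task_progress is a hypothetical stand-in for the real PropertyCollector read, not an oslo.vmware function.

from oslo_service import loopingcall


def fetch_task_progress(task_id):
    # Hypothetical stand-in: the real code reads TaskInfo through the
    # vSphere PropertyCollector. Here we just pretend the task finished.
    return "success", 100


def wait_for_task(task_id, poll_interval=0.5):
    """Poll a task at a fixed interval until it reaches a terminal state."""

    def _poll():
        state, progress = fetch_task_progress(task_id)
        print(f"Task: {task_id} progress is {progress}%")
        if state in ("success", "error"):
            # LoopingCallDone stops the timer; its value is returned by wait().
            raise loopingcall.LoopingCallDone(state)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()


print(wait_for_task("task-4255615"))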
[ 773.774343] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255617, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.853746] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255615, 'name': PowerOffVM_Task, 'duration_secs': 0.239562} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.856633] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 773.856935] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 773.857657] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bee580c9-2163-47ba-81dc-dedbf0508dc2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.931878] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 773.932166] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 773.932360] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleting the datastore file [datastore2] 5a284df5-88ea-43bf-9944-ef344f99591c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 773.932680] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cd76c20-58e2-4d24-a5b4-d17c9c942962 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.943867] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 773.943867] env[61545]: value = "task-4255619" [ 773.943867] env[61545]: _type = "Task" [ 773.943867] 
env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.957465] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.984410] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 35%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.987660] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.990379] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.953s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.992219] env[61545]: INFO nova.compute.claims [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.016560] env[61545]: INFO nova.scheduler.client.report [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Deleted allocations for instance 9dbff26a-210c-4e80-812f-c91debe3e9c1 [ 774.150536] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.150915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.151172] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.235149] env[61545]: DEBUG nova.network.neutron 
[req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updated VIF entry in instance network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.235639] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [{"id": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "address": "fa:16:3e:e3:1c:91", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc5f1dc-e8", "ovs_interfaceid": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.276849] env[61545]: DEBUG oslo_concurrency.lockutils [None req-acf0562a-fdd0-4412-9166-0da8feace1bc tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.890s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.277564] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255617, 'name': Rename_Task, 'duration_secs': 0.171489} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.278882] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 774.279182] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9c47d36-5738-42f1-8b38-7253962f5f47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.289751] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 774.289751] env[61545]: value = "task-4255620" [ 774.289751] env[61545]: _type = "Task" [ 774.289751] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.305942] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.457564] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 774.457771] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing instance network info cache due to event network-changed-9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 774.458177] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquiring lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.462049] env[61545]: DEBUG oslo_vmware.api [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289632} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.462534] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 774.462701] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 774.462760] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.462948] env[61545]: INFO nova.compute.manager [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 774.463331] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 774.463446] env[61545]: DEBUG nova.compute.manager [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 774.463499] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.489501] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 49%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.525481] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6628d89d-5925-4596-b5ed-b5c6babdf028 tempest-ServerExternalEventsTest-31429800 tempest-ServerExternalEventsTest-31429800-project-member] Lock "9dbff26a-210c-4e80-812f-c91debe3e9c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.588s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.740911] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Releasing lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.741612] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Received event network-changed-d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 774.741612] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Refreshing instance network info cache due to event network-changed-d289bc06-c77a-460a-b15d-e94dcfb3ff53. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 774.741774] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.741826] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquired lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.741960] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Refreshing network info cache for port d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.743330] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquired lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.743526] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Refreshing network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.763883] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 
8d838d3b-32ad-4bb2-839e-6bd81c363447] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.790271] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 774.804144] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255620, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.987740] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 62%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.308568] env[61545]: DEBUG oslo_vmware.api [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255620, 'name': PowerOnVM_Task, 'duration_secs': 0.589026} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.308848] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.309066] env[61545]: INFO nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Took 9.57 seconds to spawn the instance on the hypervisor. 
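The PowerOnVM_Task / wait_for_task pair above (task-4255620, "progress is 0% ... 89%") is driven through oslo.vmware's session API. A minimal sketch follows, assuming the documented positional order of VMwareAPISession's first five arguments (host, username, password, retry count, poll interval) and that vm_ref has already been looked up; the hostname and credentials are placeholders, not values from this deployment.

from oslo_vmware import api


def power_on(vm_ref):
    # Placeholder connection details; argument order assumed from
    # oslo.vmware's documented example (host, user, password,
    # api_retry_count, task_poll_interval).
    session = api.VMwareAPISession(
        "vc1.example.org", "administrator@vsphere.local", "secret", 10, 0.5)
    # invoke_api(session.vim, <SOAP method>, <managed object>) issues the
    # PowerOnVM_Task call; wait_for_task then polls it to completion,
    # which is what produces the progress entries seen in the log.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task)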
[ 775.309386] env[61545]: DEBUG nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.313093] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5f083e-652b-4952-84d1-a4d2deb9cc26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.318191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.492615] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 75%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.590335] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updated VIF entry in instance network info cache for port 9cc5f1dc-e836-46e6-8584-5c4d98ba5241. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.590507] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [{"id": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "address": "fa:16:3e:e3:1c:91", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc5f1dc-e8", "ovs_interfaceid": "9cc5f1dc-e836-46e6-8584-5c4d98ba5241", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.687576] env[61545]: DEBUG nova.network.neutron [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 
8d838d3b-32ad-4bb2-839e-6bd81c363447] Updating instance_info_cache with network_info: [{"id": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "address": "fa:16:3e:63:73:9b", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a8926c-7e", "ovs_interfaceid": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.770122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad63953-e5e3-4294-9beb-062827bab35a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.782412] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caf3861-25b6-425e-a159-4ca4de178abb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.827608] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0fa6d-a57e-4274-b5eb-7d11408b3d70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.849091] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65be7aef-e226-4041-9a59-6fdc9d1322c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.854636] env[61545]: INFO nova.compute.manager [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Took 43.75 seconds to build instance. 
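The 'Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.953s' entries above are emitted by oslo.concurrency's named-lock decorator, which serializes callers on a shared semaphore and logs how long each caller waited for and held the lock. A minimal sketch of that pattern; claim_resources is an illustrative placeholder, not the Nova method.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Only one caller at a time runs under the "compute_resources" name;
    # other callers block, and the decorator's wrapper logs the waited
    # and held durations, as in the entries above.
    print(f"claiming resources for {instance_uuid}")


claim_resources("5f4d6338-d1af-4e58-9f76-5e95d51e76f7")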
[ 775.874581] env[61545]: DEBUG nova.compute.provider_tree [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.986921] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.052261] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Updated VIF entry in instance network info cache for port d289bc06-c77a-460a-b15d-e94dcfb3ff53. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 776.052753] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Updating instance_info_cache with network_info: [{"id": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "address": "fa:16:3e:27:ab:48", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd289bc06-c7", "ovs_interfaceid": "d289bc06-c77a-460a-b15d-e94dcfb3ff53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.094196] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Releasing lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.097023] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Received 
event network-vif-plugged-71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.097023] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquiring lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.097023] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.097023] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.097023] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] No waiting events found dispatching network-vif-plugged-71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.097369] env[61545]: WARNING nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Received unexpected event network-vif-plugged-71a8926c-7ea6-4f74-b990-81464c47f0c1 for instance with vm_state building and task_state spawning. [ 776.097369] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.097369] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing instance network info cache due to event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 776.097369] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.097369] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.097527] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.190686] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.194159] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Instance network_info: |[{"id": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "address": "fa:16:3e:63:73:9b", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a8926c-7e", "ovs_interfaceid": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.194266] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:73:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71a8926c-7ea6-4f74-b990-81464c47f0c1', 
'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.203630] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.204402] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.204785] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15ef7627-8efd-4120-8195-eccf7933f7af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.233018] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.233018] env[61545]: value = "task-4255621" [ 776.233018] env[61545]: _type = "Task" [ 776.233018] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.242454] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255621, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.331662] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.362097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0091d272-9f11-4eb0-90b5-ed2562a3d3ae tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.037s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.405906] env[61545]: ERROR nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [req-07629cad-ae8f-43ec-ab71-2114321cb758] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-07629cad-ae8f-43ec-ab71-2114321cb758"}]} [ 776.429974] env[61545]: DEBUG nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 776.461850] env[61545]: DEBUG nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 776.462249] env[61545]: DEBUG nova.compute.provider_tree [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.480637] env[61545]: DEBUG nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 776.492727] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.511248] env[61545]: DEBUG nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 776.556557] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Releasing lock "refresh_cache-7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.556733] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Received event network-vif-plugged-1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.556996] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.557275] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.557405] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.557925] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] No waiting events found dispatching network-vif-plugged-1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.557925] env[61545]: WARNING nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Received unexpected event network-vif-plugged-1d429567-3342-4107-b5f0-2596a7002020 for instance with vm_state building and task_state spawning. 
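The "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event ..." pair above reflects the compute manager's per-instance event registry: spawning code registers a waiter for a Neutron event before triggering it, and an event that arrives with no registered waiter is logged as unexpected. A minimal, generic sketch of such a registry using plain threading primitives; the names are illustrative, not Nova's real attributes.

import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_name: threading.Event}
_registry_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Register interest in an external event before triggering it."""
    with _registry_lock:
        ev = threading.Event()
        _waiters[instance_uuid][event_name] = ev
    return ev


def pop_instance_event(instance_uuid, event_name):
    """Dispatch an incoming event to its waiter, or report it as unexpected."""
    with _registry_lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        ev.set()


# Dispatching with no registered waiter mirrors the WARNING entries above.
pop_instance_event("26e339f1-182b-4f00-b7c2-a2a32e942d04",
                   "network-vif-plugged-1d429567-3342-4107-b5f0-2596a7002020")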
[ 776.558022] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Received event network-changed-1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.558249] env[61545]: DEBUG nova.compute.manager [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Refreshing instance network info cache due to event network-changed-1d429567-3342-4107-b5f0-2596a7002020. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 776.558442] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquiring lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.558677] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Acquired lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.558757] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Refreshing network info cache for port 1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.745302] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255621, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.837647] env[61545]: INFO nova.compute.manager [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 2.37 seconds to deallocate network for instance. [ 776.864048] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 776.986879] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 97%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.029305] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updated VIF entry in instance network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.029305] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.176403] env[61545]: DEBUG nova.compute.manager [req-1163375a-c542-4591-a2a8-44dabba2550b req-8c0e26aa-ef8f-4453-9d08-c26ecdda118d service nova] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Received event network-vif-deleted-9cc5f1dc-e836-46e6-8584-5c4d98ba5241 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 777.239319] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7419db-041b-4bd2-9550-f6710c283290 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.253517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef75ff09-61d9-4ed5-b7e2-52c812945c5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.260160] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255621, 'name': CreateVM_Task, 'duration_secs': 0.800662} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.260355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.261694] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.261853] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.262214] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.262540] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba3337c-e1f0-44ae-b4af-df8a969b82f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.300519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b4cd6-c30e-47fd-ba8f-abd1441850ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.306495] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 777.306495] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52916b35-c1d7-7c33-65cc-b0481500f15f" [ 777.306495] env[61545]: _type = "Task" [ 777.306495] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.315449] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8ac7c2-c965-4046-8d6f-b18c4d5ad53a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.323291] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52916b35-c1d7-7c33-65cc-b0481500f15f, 'name': SearchDatastore_Task, 'duration_secs': 0.011115} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.324165] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.324683] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.324775] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.324893] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.325145] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.325911] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b82a336e-68ff-49ed-b0fe-118a04b792b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.336307] env[61545]: DEBUG nova.compute.provider_tree [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.348644] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.348931] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.350614] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.350881] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ff5a276-fb94-414a-80da-48d6e21d6623 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.360370] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 777.360370] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521182b4-f49b-46a2-ff52-8ed703787d5d" [ 777.360370] env[61545]: _type = "Task" [ 777.360370] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.373394] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521182b4-f49b-46a2-ff52-8ed703787d5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.392125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.486031] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.520169] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updated VIF entry in instance network info cache for port 1d429567-3342-4107-b5f0-2596a7002020. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.520582] env[61545]: DEBUG nova.network.neutron [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updating instance_info_cache with network_info: [{"id": "1d429567-3342-4107-b5f0-2596a7002020", "address": "fa:16:3e:67:8f:5d", "network": {"id": "1d9a30ee-02cb-4684-9776-a725dcfd5617", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-857592221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "723b7377d6c8413cb6ef900c8404b0b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d429567-33", "ovs_interfaceid": "1d429567-3342-4107-b5f0-2596a7002020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.539164] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.539665] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Received event network-changed-71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 777.539889] env[61545]: DEBUG nova.compute.manager [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Refreshing instance network info cache due to event network-changed-71a8926c-7ea6-4f74-b990-81464c47f0c1. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 777.540173] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquiring lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.540336] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Acquired lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.540508] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Refreshing network info cache for port 71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.839634] env[61545]: DEBUG nova.scheduler.client.report [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.853112] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.877597] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521182b4-f49b-46a2-ff52-8ed703787d5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011342} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.878476] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f180a7c-d892-4e89-b8af-895ca728ef70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.885673] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 777.885673] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524cd217-b694-f5cc-d551-cca09c1852ea" [ 777.885673] env[61545]: _type = "Task" [ 777.885673] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.896333] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524cd217-b694-f5cc-d551-cca09c1852ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.985832] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255616, 'name': RelocateVM_Task, 'duration_secs': 4.023314} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.987660] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Volume attach. Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 777.987851] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838592', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'name': 'volume-7578d319-27d0-4d5d-99aa-9bce7818396d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '26e339f1-182b-4f00-b7c2-a2a32e942d04', 'attached_at': '', 'detached_at': '', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'serial': '7578d319-27d0-4d5d-99aa-9bce7818396d'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 777.990942] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbef9c73-26d0-4409-9652-7aaca3a6063b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.023276] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a9f596-5af5-4364-a2a6-b55d7acbd5de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.026539] env[61545]: DEBUG oslo_concurrency.lockutils [req-e35f1370-3d68-4874-8458-9b94606abb8e req-819abe62-6878-4a65-bdb0-986d22b29392 service nova] Releasing lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.055695] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] volume-7578d319-27d0-4d5d-99aa-9bce7818396d/volume-7578d319-27d0-4d5d-99aa-9bce7818396d.vmdk or device None with type thin {{(pid=61545) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.055695] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9250f3e-7e92-487a-a372-44014d0182f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.085043] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 778.085043] env[61545]: value = "task-4255622" [ 778.085043] env[61545]: _type = "Task" [ 778.085043] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.093982] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255622, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.347935] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.357s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.348592] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 778.351953] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.865s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.352286] env[61545]: DEBUG nova.objects.instance [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lazy-loading 'resources' on Instance uuid 26a6b40e-f8a4-4cc6-bdbb-586ca592901c {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.407831] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524cd217-b694-f5cc-d551-cca09c1852ea, 'name': SearchDatastore_Task, 'duration_secs': 0.011239} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.409144] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.409480] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8d838d3b-32ad-4bb2-839e-6bd81c363447/8d838d3b-32ad-4bb2-839e-6bd81c363447.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.409805] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-337b63fb-95f7-4a31-a1ac-0a4bf28cdfb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.418594] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 778.418594] env[61545]: value = "task-4255623" [ 778.418594] env[61545]: _type = "Task" [ 778.418594] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.429965] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255623, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.496955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "1722d63d-e604-44fe-8198-13e6c5bce016" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.497499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.497761] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.497992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.498240] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.501694] env[61545]: INFO nova.compute.manager [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Terminating instance [ 778.603553] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255622, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.637377] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Updated VIF entry in instance network info cache for port 71a8926c-7ea6-4f74-b990-81464c47f0c1. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.637748] env[61545]: DEBUG nova.network.neutron [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Updating instance_info_cache with network_info: [{"id": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "address": "fa:16:3e:63:73:9b", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a8926c-7e", "ovs_interfaceid": "71a8926c-7ea6-4f74-b990-81464c47f0c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.856285] env[61545]: DEBUG nova.compute.utils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.862078] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.863313] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.932214] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255623, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.991912] env[61545]: DEBUG nova.policy [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b8dbbeb348b4da89b72f23187f36e6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17d73ecd0fb24d80a2269a891dc3cdbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.007238] env[61545]: DEBUG nova.compute.manager [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 779.007913] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.009008] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f276f1-ec47-401f-a568-f0f8115df817 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.021355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.021955] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aea9d111-45e2-4a6f-9fe7-7d5935e6fdee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.029920] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 779.029920] env[61545]: value = "task-4255624" [ 779.029920] env[61545]: _type = "Task" [ 779.029920] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.039905] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255624, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.099040] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255622, 'name': ReconfigVM_Task, 'duration_secs': 0.549727} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.099508] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Reconfigured VM instance instance-0000001f to attach disk [datastore2] volume-7578d319-27d0-4d5d-99aa-9bce7818396d/volume-7578d319-27d0-4d5d-99aa-9bce7818396d.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.109495] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7a761eb-d79e-4b4e-ade7-7ef2f8b7b4be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.128104] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 779.128104] env[61545]: value = "task-4255625" [ 779.128104] env[61545]: _type = "Task" [ 779.128104] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.150927] env[61545]: DEBUG oslo_concurrency.lockutils [req-500c52f8-858b-479c-b9a8-a14fb8c1eabc req-efaea970-a3a5-4c04-b179-c60287a4f1d4 service nova] Releasing lock "refresh_cache-8d838d3b-32ad-4bb2-839e-6bd81c363447" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.152044] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255625, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.366514] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 779.435395] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526535} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.438688] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8d838d3b-32ad-4bb2-839e-6bd81c363447/8d838d3b-32ad-4bb2-839e-6bd81c363447.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.439169] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.441745] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c31500d5-abe4-4eba-b4c6-9ca2fc1e1b08 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.449032] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 779.449032] env[61545]: value = "task-4255626" [ 779.449032] env[61545]: _type = "Task" [ 779.449032] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.461739] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.543995] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255624, 'name': PowerOffVM_Task, 'duration_secs': 0.212576} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.544297] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.544470] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 779.544729] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8216a63f-b70a-49ad-97b8-c043cad023be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.611579] env[61545]: DEBUG nova.compute.manager [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 779.611896] env[61545]: DEBUG nova.compute.manager [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing instance network info cache due to event network-changed-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 779.612456] env[61545]: DEBUG oslo_concurrency.lockutils [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] Acquiring lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.614205] env[61545]: DEBUG oslo_concurrency.lockutils [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] Acquired lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.614205] env[61545]: DEBUG nova.network.neutron [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Refreshing network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.625603] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 779.625603] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Deleting contents of the VM from 
datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 779.626082] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Deleting the datastore file [datastore2] 1722d63d-e604-44fe-8198-13e6c5bce016 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 779.629591] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a59f8e80-03e4-4b35-92bc-ac572ee6e445 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.652020] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255625, 'name': ReconfigVM_Task, 'duration_secs': 0.182203} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.652020] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838592', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'name': 'volume-7578d319-27d0-4d5d-99aa-9bce7818396d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '26e339f1-182b-4f00-b7c2-a2a32e942d04', 'attached_at': '', 'detached_at': '', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'serial': '7578d319-27d0-4d5d-99aa-9bce7818396d'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 779.652020] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for the task: (returnval){ [ 779.652020] env[61545]: value = "task-4255628" [ 779.652020] env[61545]: _type = "Task" [ 779.652020] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.653490] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac62e418-513b-4d8c-8be5-cd01bdfab58d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.656346] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a4e08b-4341-4d14-9bc5-59cdafa5729e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.676545] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5770431f-1d10-428b-85a4-38c4fdc097a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.680137] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255628, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.680484] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 779.680484] env[61545]: value = "task-4255629" [ 779.680484] env[61545]: _type = "Task" [ 779.680484] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.712619] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2796ee-6c76-4cce-9b21-cefbb17d293b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.718653] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255629, 'name': Rename_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.724276] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7ee38f-7915-43fb-b46b-ca1a9aa3cd37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.739624] env[61545]: DEBUG nova.compute.provider_tree [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.741653] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Successfully created port: f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.930206] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.930935] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.963024] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076094} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.963024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.963024] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4563aa19-a13c-40c5-b762-98f72a59e023 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.990390] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 8d838d3b-32ad-4bb2-839e-6bd81c363447/8d838d3b-32ad-4bb2-839e-6bd81c363447.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.991261] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb5093eb-518a-4fbd-acd5-946de5777c3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.016914] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 780.016914] env[61545]: value = "task-4255630" [ 780.016914] env[61545]: _type = "Task" [ 780.016914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.025167] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255630, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.168083] env[61545]: DEBUG oslo_vmware.api [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Task: {'id': task-4255628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175949} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.168442] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 780.168605] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 780.168782] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.168956] env[61545]: INFO nova.compute.manager [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Took 1.16 seconds to destroy the instance on the hypervisor. [ 780.169225] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.169419] env[61545]: DEBUG nova.compute.manager [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 780.169510] env[61545]: DEBUG nova.network.neutron [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.191801] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255629, 'name': Rename_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.250150] env[61545]: DEBUG nova.scheduler.client.report [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.376909] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 780.418980] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 780.419356] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.419638] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.420048] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.420342] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.420540] env[61545]: 
DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 780.420935] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 780.421212] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 780.421530] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 780.421816] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 780.422133] env[61545]: DEBUG nova.virt.hardware [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 780.423584] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326b4b41-8ad8-4e4b-b0c5-bbfb1c0a6c43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.436040] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52bde24-26e8-4bf1-aaf7-c3aee385b9dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.529832] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.692355] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255629, 'name': Rename_Task, 'duration_secs': 0.809228} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.696078] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.696383] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-628423e6-bd9d-4c86-abf6-1af185837116 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.704064] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 780.704064] env[61545]: value = "task-4255631" [ 780.704064] env[61545]: _type = "Task" [ 780.704064] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.724803] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.727981] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "9b62358e-c834-461c-9954-49f513b0f4ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.728275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.762026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.407s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.766147] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.066s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.766439] env[61545]: DEBUG nova.objects.instance [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 
tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lazy-loading 'resources' on Instance uuid 43cf4c96-2c8b-4520-8926-c1be5a87734e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.813721] env[61545]: INFO nova.scheduler.client.report [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted allocations for instance 26a6b40e-f8a4-4cc6-bdbb-586ca592901c [ 780.963599] env[61545]: DEBUG nova.network.neutron [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updated VIF entry in instance network info cache for port b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.964865] env[61545]: DEBUG nova.network.neutron [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [{"id": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "address": "fa:16:3e:9d:12:c4", "network": {"id": "07a274f5-2d37-474c-9be8-4719c638f0ce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2032708313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e060fc87d3ea4aa9bb25853eeeca3c23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4792d3c-8f", "ovs_interfaceid": "b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.030383] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255630, 'name': ReconfigVM_Task, 'duration_secs': 0.783081} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.031034] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 8d838d3b-32ad-4bb2-839e-6bd81c363447/8d838d3b-32ad-4bb2-839e-6bd81c363447.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.031335] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d840da0-a26c-4342-86f4-bdd6ab850f2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.039309] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 781.039309] env[61545]: value = "task-4255632" [ 781.039309] env[61545]: _type = "Task" [ 781.039309] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.057713] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255632, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.217457] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255631, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.325491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cda259a4-88d4-4524-9994-e4c1c57a72c2 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "26a6b40e-f8a4-4cc6-bdbb-586ca592901c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.380s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.470047] env[61545]: DEBUG oslo_concurrency.lockutils [req-beda08ee-3e70-4c34-bfd0-1db6b85d941c req-1611198b-049f-4b5a-9fb6-558ed11ef532 service nova] Releasing lock "refresh_cache-d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.496216] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.496475] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.523610] env[61545]: DEBUG nova.network.neutron [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.556594] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255632, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.719585] env[61545]: DEBUG oslo_vmware.api [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255631, 'name': PowerOnVM_Task, 'duration_secs': 0.75748} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.719585] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.719585] env[61545]: INFO nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Took 11.35 seconds to spawn the instance on the hypervisor. 
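The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver calls a vSphere task method, receives a task reference, and the session polls that task until vCenter reports success or failure (the recurring "progress is N%." lines). A minimal sketch of that pattern in Python, assuming placeholder credentials and a pre-existing VM managed-object reference; it illustrates the library pattern only, not the Nova code that produced these lines:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vmware_exc

    # Placeholder vCenter endpoint and credentials, not taken from this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # Invoking a *_Task method returns a task reference, not a result.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        try:
            # wait_for_task polls the task state until 'success' or 'error';
            # each poll corresponds to one "progress is N%." debug line.
            return session.wait_for_task(task)
        except vmware_exc.VimException:
            # vCenter reported the task as failed.
            raise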
[ 781.719585] env[61545]: DEBUG nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.725134] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b935328-3220-434c-af94-98379315451b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.982432] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54867a95-b707-4f64-85bd-093cd50e2f3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.991296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98eb56c-d44c-4423-bd3c-230b78723ec7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.996996] env[61545]: DEBUG nova.compute.manager [req-c43c741c-d791-48cf-a778-89524efecf28 req-d0888525-0d9e-4fd2-9d9e-0b7bb090a36e service nova] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Received event network-vif-deleted-20ad03f5-59d2-4cd0-b053-4d59cc02f11b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 782.029031] env[61545]: INFO nova.compute.manager [-] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Took 1.86 seconds to deallocate network for instance. [ 782.031456] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6991e04a-0b52-4c20-a418-7cde7fa0e562 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.046227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc0abe0-0058-469a-b1c2-9ade999969e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.057434] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255632, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.067222] env[61545]: DEBUG nova.compute.provider_tree [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.149105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.149342] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.214126] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Successfully updated port: f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.247556] env[61545]: INFO nova.compute.manager [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Took 46.04 seconds to build instance. [ 782.267521] env[61545]: DEBUG nova.objects.instance [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lazy-loading 'flavor' on Instance uuid 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.539730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.551676] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255632, 'name': Rename_Task, 'duration_secs': 1.175569} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.551676] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.551998] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cddc6bb5-bb05-4eec-994b-1a222e6bd9d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.558944] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 782.558944] env[61545]: value = "task-4255633" [ 782.558944] env[61545]: _type = "Task" [ 782.558944] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.568096] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.570150] env[61545]: DEBUG nova.scheduler.client.report [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.722118] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.722118] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.722118] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.750077] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec9b4d15-ac5d-48a9-b0de-ea2e16c7e1bc 
tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.133s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.773624] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.773876] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.996820] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "2a0576f9-d740-4dfa-9783-17eb3987840b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.997204] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.997374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.997652] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.997814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.000081] env[61545]: INFO nova.compute.manager [None 
req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Terminating instance [ 783.073519] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255633, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.075781] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.078546] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.175s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.080652] env[61545]: INFO nova.compute.claims [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.111836] env[61545]: INFO nova.scheduler.client.report [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleted allocations for instance 43cf4c96-2c8b-4520-8926-c1be5a87734e [ 783.170481] env[61545]: DEBUG nova.compute.manager [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Received event network-changed-1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 783.170481] env[61545]: DEBUG nova.compute.manager [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Refreshing instance network info cache due to event network-changed-1d429567-3342-4107-b5f0-2596a7002020. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 783.170481] env[61545]: DEBUG oslo_concurrency.lockutils [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] Acquiring lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.170481] env[61545]: DEBUG oslo_concurrency.lockutils [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] Acquired lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.170481] env[61545]: DEBUG nova.network.neutron [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Refreshing network info cache for port 1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.252716] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.300119] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.505417] env[61545]: DEBUG nova.compute.manager [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 783.505417] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.505417] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216a9a09-de1b-4819-8a1c-4a43882b5d5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.514171] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.514429] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5941fc0b-f4dd-4174-ad30-28a31404706f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.521134] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 783.521134] env[61545]: value = "task-4255634" [ 783.521134] env[61545]: _type = "Task" [ 783.521134] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.530388] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255634, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.574420] env[61545]: DEBUG oslo_vmware.api [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255633, 'name': PowerOnVM_Task, 'duration_secs': 0.540773} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.578106] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.578425] env[61545]: INFO nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Took 12.20 seconds to spawn the instance on the hypervisor. 
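The paired "Acquiring lock … / acquired … waited N s / released … held N s" entries throughout this section come from oslo.concurrency's lock helpers: the per-instance build lock, the resource tracker's "compute_resources" lock, and the "refresh_cache-<uuid>" locks are all named locks, so the waited/held durations above can be read directly as contention between concurrent requests. A minimal sketch of the two usual forms, using a hypothetical lock name and callables rather than the actual Nova call sites:

    from oslo_concurrency import lockutils

    # Context-manager form: debug lines are emitted when the lock is
    # acquired (with the time waited) and released (with the time held).
    def locked_refresh(instance_uuid, refresh_fn):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn()

    # Decorator form, as used for coarse-grained locks such as the
    # resource tracker's "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        tracker.update(instance)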
[ 783.578688] env[61545]: DEBUG nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.581157] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf5975e-0136-4268-9f97-a1e5a2075200 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.624648] env[61545]: DEBUG oslo_concurrency.lockutils [None req-776f39b6-d89b-478c-b75c-d5e91de97b61 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "43cf4c96-2c8b-4520-8926-c1be5a87734e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.123s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.629061] env[61545]: DEBUG nova.network.neutron [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.649330] env[61545]: DEBUG nova.network.neutron [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Updating instance_info_cache with network_info: [{"id": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "address": "fa:16:3e:51:90:38", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6cead7f-5e", "ovs_interfaceid": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.782723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.034601] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 
tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255634, 'name': PowerOffVM_Task, 'duration_secs': 0.279711} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.035164] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.035305] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.035728] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e3a2687-8f8a-4c20-9c0b-7ac77ccfd2c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.053768] env[61545]: DEBUG nova.network.neutron [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updated VIF entry in instance network info cache for port 1d429567-3342-4107-b5f0-2596a7002020. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.054139] env[61545]: DEBUG nova.network.neutron [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updating instance_info_cache with network_info: [{"id": "1d429567-3342-4107-b5f0-2596a7002020", "address": "fa:16:3e:67:8f:5d", "network": {"id": "1d9a30ee-02cb-4684-9776-a725dcfd5617", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-857592221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "723b7377d6c8413cb6ef900c8404b0b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf99dce-c773-48db-a2d9-00b8d0a7c75d", "external-id": "nsx-vlan-transportzone-248", "segmentation_id": 248, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d429567-33", "ovs_interfaceid": "1d429567-3342-4107-b5f0-2596a7002020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.074354] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Received event network-vif-plugged-f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 784.074605] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Acquiring lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.074866] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.075073] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.075278] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] No waiting events found dispatching network-vif-plugged-f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 784.075491] env[61545]: WARNING nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Received unexpected event network-vif-plugged-f6cead7f-5ede-4097-9f73-f9849bdc96bc for instance with vm_state building and task_state spawning. [ 784.075675] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Received event network-changed-f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 784.075819] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Refreshing instance network info cache due to event network-changed-f6cead7f-5ede-4097-9f73-f9849bdc96bc. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 784.076064] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Acquiring lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.097392] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.097446] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.097638] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Deleting the datastore file [datastore2] 2a0576f9-d740-4dfa-9783-17eb3987840b {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.098982] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0096f07-8c71-4075-b739-a8475862d9ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.117539] env[61545]: INFO nova.compute.manager [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Took 44.12 seconds to build instance. [ 784.122706] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for the task: (returnval){ [ 784.122706] env[61545]: value = "task-4255636" [ 784.122706] env[61545]: _type = "Task" [ 784.122706] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.138993] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.156252] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.156252] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Instance network_info: |[{"id": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "address": "fa:16:3e:51:90:38", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6cead7f-5e", "ovs_interfaceid": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.157773] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Acquired lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.158407] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Refreshing network info cache for port f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.159784] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:90:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6cead7f-5ede-4097-9f73-f9849bdc96bc', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.168924] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating folder: 
Project (17d73ecd0fb24d80a2269a891dc3cdbb). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.170661] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1c571c9-88f2-47bc-b3e9-8a6f9e706410 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.188781] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Created folder: Project (17d73ecd0fb24d80a2269a891dc3cdbb) in parent group-v838542. [ 784.189083] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating folder: Instances. Parent ref: group-v838633. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.189387] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ab90a24-9276-4ad8-9d38-015736e66449 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.194636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "8a3ac91d-8949-4745-9161-1a70899c0293" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.194903] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.195148] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.195336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.195501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.201264] env[61545]: INFO nova.compute.manager [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Terminating instance [ 784.203894] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Created folder: Instances in parent group-v838633. [ 784.204227] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.205632] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.206123] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ca2ab77-8118-4d83-be30-fe5b1192fbee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.234676] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.234676] env[61545]: value = "task-4255639" [ 784.234676] env[61545]: _type = "Task" [ 784.234676] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.248315] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255639, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.483754] env[61545]: DEBUG nova.network.neutron [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.559351] env[61545]: DEBUG oslo_concurrency.lockutils [req-7783c090-5b50-41af-99cf-43745dc8710f req-9b56f7fb-a18a-4ff5-a98e-493e0f3cfac5 service nova] Releasing lock "refresh_cache-26e339f1-182b-4f00-b7c2-a2a32e942d04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.620708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57e0f798-76ca-42cc-b8df-19bce33d678d tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.303s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.636680] env[61545]: DEBUG oslo_vmware.api [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Task: {'id': task-4255636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307806} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.640494] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.640734] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.640938] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.641147] env[61545]: INFO nova.compute.manager [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 784.641432] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.642201] env[61545]: DEBUG nova.compute.manager [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.642317] env[61545]: DEBUG nova.network.neutron [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.707304] env[61545]: DEBUG nova.compute.manager [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.707514] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.708632] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc74fa6-6397-4d85-b65b-bc5012ce2820 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.717906] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.718283] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8e93fd2-1eb9-49a4-9878-87b35f2d9189 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.726343] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 784.726343] env[61545]: value = "task-4255640" [ 784.726343] env[61545]: _type = "Task" [ 784.726343] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.736966] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.753466] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255639, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.810828] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4e198a-3772-41c1-881d-93658cec2f5d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.821085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6771fe05-8cbd-4dd8-8a2e-d49fe9049170 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.859251] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8798f8f-0ef6-4230-8ada-84ffaebf01a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.867799] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2390fff7-62c2-4d32-a767-b41b03a75afa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.882589] env[61545]: DEBUG nova.compute.provider_tree [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.986751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.987104] env[61545]: DEBUG nova.compute.manager [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Inject network info {{(pid=61545) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 784.987492] env[61545]: DEBUG nova.compute.manager [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] network_info to inject: |[{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 784.995467] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfiguring VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 784.996079] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ed7a2de-ee84-4f56-8ec8-7542be8fb46a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.019827] env[61545]: DEBUG oslo_vmware.api [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 785.019827] env[61545]: value = "task-4255641" [ 785.019827] env[61545]: _type = "Task" [ 785.019827] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.030272] env[61545]: DEBUG oslo_vmware.api [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255641, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.126964] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.239405] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255640, 'name': PowerOffVM_Task, 'duration_secs': 0.268756} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.243222] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.243443] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 785.243721] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b349bda-f080-454d-a239-aa86311ca335 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.256636] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255639, 'name': CreateVM_Task, 'duration_secs': 0.637792} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.256977] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.257741] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.257909] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.258268] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.258595] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a3be07-a38a-4afe-81b0-6df7dc59c543 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.265806] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 785.265806] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ef062-fafa-139d-072e-5b1e0abd48ce" [ 785.265806] env[61545]: _type = "Task" [ 785.265806] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.282381] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ef062-fafa-139d-072e-5b1e0abd48ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.336460] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 785.336710] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 785.336844] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleting the datastore file [datastore2] 8a3ac91d-8949-4745-9161-1a70899c0293 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 785.337288] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aced121c-f1bb-4c5f-89bd-ff86f50c84bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.344568] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for the task: (returnval){ [ 785.344568] env[61545]: value = "task-4255643" [ 785.344568] env[61545]: _type = "Task" [ 785.344568] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.358568] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255643, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.387128] env[61545]: DEBUG nova.scheduler.client.report [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.483259] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Updated VIF entry in instance network info cache for port f6cead7f-5ede-4097-9f73-f9849bdc96bc. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 785.483259] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Updating instance_info_cache with network_info: [{"id": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "address": "fa:16:3e:51:90:38", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6cead7f-5e", "ovs_interfaceid": "f6cead7f-5ede-4097-9f73-f9849bdc96bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.530962] env[61545]: DEBUG oslo_vmware.api [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255641, 'name': ReconfigVM_Task, 'duration_secs': 0.267496} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.531276] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b63700-e8a9-450e-93f3-e19ecfdbfab5 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfigured VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 785.653176] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.689989] env[61545]: DEBUG nova.objects.instance [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lazy-loading 'flavor' on Instance uuid 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.785676] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ef062-fafa-139d-072e-5b1e0abd48ce, 'name': SearchDatastore_Task, 'duration_secs': 0.022314} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.787636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.787636] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.787636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.787636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.787898] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.787898] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f2b6bb0-30df-4049-83b6-35909c5f0f33 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.800305] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.800659] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.802880] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef86315-c068-4aac-9c9e-7c8d8141e684 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.809331] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 785.809331] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a1174-1e45-c527-dd50-4f8c60313c29" [ 785.809331] env[61545]: _type = "Task" [ 785.809331] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.818418] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a1174-1e45-c527-dd50-4f8c60313c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.856113] env[61545]: DEBUG oslo_vmware.api [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Task: {'id': task-4255643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292977} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.856395] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.856587] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 785.856767] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.858243] env[61545]: INFO nova.compute.manager [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Took 1.15 seconds to destroy the instance on the hypervisor. [ 785.858243] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.858243] env[61545]: DEBUG nova.compute.manager [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 785.858243] env[61545]: DEBUG nova.network.neutron [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.893129] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.814s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.894151] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.897499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.968s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.897578] env[61545]: DEBUG nova.objects.instance [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lazy-loading 'resources' on Instance uuid 72656070-cfd0-4104-a9c7-ec20c5a6238a {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.988150] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Releasing lock "refresh_cache-5f4d6338-d1af-4e58-9f76-5e95d51e76f7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.988470] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 785.988700] env[61545]: DEBUG nova.compute.manager [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing instance network info cache due to event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 785.988980] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.989169] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.989397] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.027127] env[61545]: DEBUG nova.network.neutron [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.121861] env[61545]: DEBUG nova.compute.manager [req-722df3d2-9216-48f3-8e5f-61d57beb74e2 req-a5910559-42cf-44d7-b51f-53d2435eadfe service nova] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Received event network-vif-deleted-25571e3a-347e-4c8e-82cc-0f636f3bca8d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 786.195981] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.323889] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a1174-1e45-c527-dd50-4f8c60313c29, 'name': SearchDatastore_Task, 'duration_secs': 0.013896} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.325179] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34d49a79-2483-40a7-b7dc-eaa17d0a726f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.332566] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 786.332566] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280fd43-904f-72c6-ea9b-5bb2b9288fc4" [ 786.332566] env[61545]: _type = "Task" [ 786.332566] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.341378] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280fd43-904f-72c6-ea9b-5bb2b9288fc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.403583] env[61545]: DEBUG nova.compute.utils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.404930] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.405497] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.532201] env[61545]: INFO nova.compute.manager [-] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Took 1.89 seconds to deallocate network for instance. [ 786.548488] env[61545]: DEBUG nova.policy [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b8dbbeb348b4da89b72f23187f36e6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17d73ecd0fb24d80a2269a891dc3cdbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.729890] env[61545]: DEBUG nova.network.neutron [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.845732] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5280fd43-904f-72c6-ea9b-5bb2b9288fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.031373} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.846079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.846360] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5f4d6338-d1af-4e58-9f76-5e95d51e76f7/5f4d6338-d1af-4e58-9f76-5e95d51e76f7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.849153] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60eb6d64-e78b-42e3-a13a-d3c1b2c023e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.857703] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 786.857703] env[61545]: value = "task-4255644" [ 786.857703] env[61545]: _type = "Task" [ 786.857703] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.872609] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.910091] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 787.038902] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.086422] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updated VIF entry in instance network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.086536] env[61545]: DEBUG nova.network.neutron [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.108205] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886d5be9-5ebb-4d4a-bacc-63c0fc7e20e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.121801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.121801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.123139] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21222cec-2af9-4048-b5a3-8daf3ae358eb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.173793] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3396d738-d3fa-4a74-b260-a0df8269898f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.184142] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f83902-03e1-480d-b8b6-533080d42984 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.193134] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Successfully created port: 3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.209669] env[61545]: DEBUG nova.compute.provider_tree [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.233048] env[61545]: INFO nova.compute.manager [-] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Took 1.38 seconds to deallocate network for instance. [ 787.370954] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255644, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.589380] env[61545]: DEBUG oslo_concurrency.lockutils [req-6ad32ae4-65ef-44d8-9b1a-4300f2d02580 req-b2471439-5a31-4f0f-a3e3-ac5bda2e0ebd service nova] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.589884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.713344] env[61545]: DEBUG nova.scheduler.client.report [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.740919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.870185] env[61545]: DEBUG oslo_vmware.api [None 
req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255644, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81047} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.870185] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5f4d6338-d1af-4e58-9f76-5e95d51e76f7/5f4d6338-d1af-4e58-9f76-5e95d51e76f7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.870390] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.870670] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ead70e0-36ae-4791-b1e8-c5e38947e4b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.878289] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 787.878289] env[61545]: value = "task-4255645" [ 787.878289] env[61545]: _type = "Task" [ 787.878289] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.887038] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255645, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.923169] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.962536] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.962967] env[61545]: DEBUG 
nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.962967] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.963041] env[61545]: DEBUG nova.virt.hardware [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.964028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c7001a-c41b-4a68-8631-a7bec9585273 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.972476] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8feed8e-5439-4347-9761-90df45db6ecd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.150927] env[61545]: DEBUG nova.network.neutron [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.182751] env[61545]: DEBUG nova.compute.manager [req-fdd6c06e-80ee-4f53-8785-3718279c4bee req-64656e46-ee93-4f64-810c-d2a3faabeeaf service nova] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Received event network-vif-deleted-e19a0a22-1c4e-4aa7-94cf-a5e630bb1857 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 788.219164] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.322s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.222449] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.163s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.222703] env[61545]: DEBUG nova.objects.instance [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lazy-loading 'resources' on Instance uuid 79ba6f70-c967-4abf-a2a7-c70046a2602d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.254521] env[61545]: INFO nova.scheduler.client.report 
[None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted allocations for instance 72656070-cfd0-4104-a9c7-ec20c5a6238a [ 788.388373] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255645, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068961} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.388654] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.389446] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe428974-a327-4b45-935b-7696151cb83a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.411767] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 5f4d6338-d1af-4e58-9f76-5e95d51e76f7/5f4d6338-d1af-4e58-9f76-5e95d51e76f7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.411856] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cb83723-bad2-45ab-9423-c03e823d38a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.433900] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 788.433900] env[61545]: value = "task-4255646" [ 788.433900] env[61545]: _type = "Task" [ 788.433900] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.442902] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255646, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.764039] env[61545]: DEBUG oslo_concurrency.lockutils [None req-de67c4ae-ff04-4f32-8770-3937feaad8fc tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "72656070-cfd0-4104-a9c7-ec20c5a6238a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.364s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.950140] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255646, 'name': ReconfigVM_Task, 'duration_secs': 0.514975} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.957446] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 5f4d6338-d1af-4e58-9f76-5e95d51e76f7/5f4d6338-d1af-4e58-9f76-5e95d51e76f7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.959393] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf52da3a-a3fc-4eab-aea3-cf0766666f2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.966291] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 788.966291] env[61545]: value = "task-4255647" [ 788.966291] env[61545]: _type = "Task" [ 788.966291] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.976173] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255647, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.044152] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Successfully updated port: 3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 789.127965] env[61545]: DEBUG nova.network.neutron [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.353238] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc50f3f-47bc-4449-a288-9221e035d756 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.363397] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d571b1dd-5a7f-465e-b2a7-4bf8536cb897 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.396358] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb629c62-a5e6-4555-a09f-a3eb8510a627 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.406959] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6160e3-8f42-47ba-868a-88da5405ebf1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.422063] env[61545]: DEBUG nova.compute.provider_tree [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e 
{{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.476682] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255647, 'name': Rename_Task, 'duration_secs': 0.141727} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.477040] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.477322] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-039d805b-b497-491c-85a0-b297eb71186d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.484212] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 789.484212] env[61545]: value = "task-4255648" [ 789.484212] env[61545]: _type = "Task" [ 789.484212] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.492800] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255648, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.546884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.547077] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.547253] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.631178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.631437] env[61545]: DEBUG nova.compute.manager [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Inject network info {{(pid=61545) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 789.631710] env[61545]: DEBUG nova.compute.manager [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] network_info to inject: |[{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) 
_inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 789.637808] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfiguring VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 789.638157] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6138a99b-830c-41be-9cbc-daa14c5d3cad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.655322] env[61545]: DEBUG oslo_vmware.api [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 789.655322] env[61545]: value = "task-4255649" [ 789.655322] env[61545]: _type = "Task" [ 789.655322] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.666647] env[61545]: DEBUG oslo_vmware.api [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255649, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.908075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.908075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.908075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.908075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.908361] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.910550] env[61545]: INFO nova.compute.manager [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Terminating instance [ 789.924789] env[61545]: DEBUG nova.scheduler.client.report [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.994808] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255648, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.104898] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.166657] env[61545]: DEBUG oslo_vmware.api [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255649, 'name': ReconfigVM_Task, 'duration_secs': 0.183077} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.166964] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87df51c2-796c-46d4-be19-39fe180572b1 tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Reconfigured VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 790.225597] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 790.225826] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing instance network info cache due to event network-changed-2b9f3635-8a28-4d33-be62-134aabc38027. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 790.226166] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Acquiring lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.226436] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Acquired lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.226560] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Refreshing network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.292340] env[61545]: DEBUG nova.network.neutron [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Updating instance_info_cache with network_info: [{"id": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "address": "fa:16:3e:ce:4d:1d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ecc6a7f-17", "ovs_interfaceid": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.415399] env[61545]: DEBUG nova.compute.manager [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.415667] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.416796] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdb4f21-d24f-4416-93e3-a05b332b4923 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.425415] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.425722] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84e1f09f-5b3a-488d-9155-38f2a152a51e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.430065] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.434023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.861s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.434862] env[61545]: INFO nova.compute.claims [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.438238] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 790.438238] env[61545]: value = "task-4255650" [ 790.438238] env[61545]: _type = "Task" [ 790.438238] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.448889] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.460073] env[61545]: INFO nova.scheduler.client.report [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted allocations for instance 79ba6f70-c967-4abf-a2a7-c70046a2602d [ 790.502352] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255648, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.797210] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.797743] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Instance network_info: |[{"id": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "address": "fa:16:3e:ce:4d:1d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ecc6a7f-17", "ovs_interfaceid": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.798403] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:4d:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ecc6a7f-17b3-4e11-92bd-19f00ab1364e', 
'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.811237] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.811595] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.811914] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b779e4c-ff8d-4f60-adb4-7e87effebc03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.846513] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.846513] env[61545]: value = "task-4255651" [ 790.846513] env[61545]: _type = "Task" [ 790.846513] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.858624] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255651, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.953429] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255650, 'name': PowerOffVM_Task, 'duration_secs': 0.253988} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.954457] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.954653] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 790.954911] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a3dad72-83f2-4950-90b2-13d7b4093d4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.971332] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab422579-5fc8-4b80-94b5-b2698b77f238 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "79ba6f70-c967-4abf-a2a7-c70046a2602d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.712s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.998191] env[61545]: DEBUG oslo_vmware.api [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255648, 'name': PowerOnVM_Task, 'duration_secs': 1.319347} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.998517] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.998735] env[61545]: INFO nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 10.62 seconds to spawn the instance on the hypervisor. 
[ 790.998916] env[61545]: DEBUG nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.000467] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d8a643-2aba-4ce5-9ea1-7872ed98b9d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.024194] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.024194] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.024194] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Deleting the datastore file [datastore2] 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.024194] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdd6389f-891c-43b4-ae0b-8e50856b3d7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.032099] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for the task: (returnval){ [ 791.032099] env[61545]: value = "task-4255653" [ 791.032099] env[61545]: _type = "Task" [ 791.032099] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.041032] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.063589] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updated VIF entry in instance network info cache for port 2b9f3635-8a28-4d33-be62-134aabc38027. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.064036] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [{"id": "2b9f3635-8a28-4d33-be62-134aabc38027", "address": "fa:16:3e:b1:e2:fa", "network": {"id": "9c6c209f-7670-4eb7-b9fa-9098a27e3768", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-439021077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf5ed987e5404b629b6014e2b74d69d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9f3635-8a", "ovs_interfaceid": "2b9f3635-8a28-4d33-be62-134aabc38027", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.356259] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255651, 'name': CreateVM_Task, 'duration_secs': 0.366968} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.356460] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.357153] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.357327] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.357642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.357907] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e21edd9b-61e4-4435-bee2-418d2a4ea335 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.362860] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 791.362860] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527604c4-3d1d-19ce-2cd2-837ec4602447" [ 791.362860] env[61545]: _type = "Task" [ 791.362860] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.372477] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527604c4-3d1d-19ce-2cd2-837ec4602447, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.527754] env[61545]: INFO nova.compute.manager [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 46.52 seconds to build instance. [ 791.542357] env[61545]: DEBUG oslo_vmware.api [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Task: {'id': task-4255653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208021} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.545836] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.546121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 791.546359] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 791.546488] env[61545]: INFO nova.compute.manager [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 791.546730] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 791.547972] env[61545]: DEBUG nova.compute.manager [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 791.547972] env[61545]: DEBUG nova.network.neutron [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.567900] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Releasing lock "refresh_cache-6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.568406] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Received event network-vif-plugged-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 791.572017] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Acquiring lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.572017] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.572017] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.572017] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] No waiting events found dispatching network-vif-plugged-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 791.572017] env[61545]: WARNING nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Received unexpected event network-vif-plugged-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e for instance with vm_state building and task_state spawning. 
[ 791.572017] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Received event network-changed-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 791.572017] env[61545]: DEBUG nova.compute.manager [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Refreshing instance network info cache due to event network-changed-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 791.572017] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Acquiring lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.572017] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Acquired lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.572017] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Refreshing network info cache for port 3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.880448] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527604c4-3d1d-19ce-2cd2-837ec4602447, 'name': SearchDatastore_Task, 'duration_secs': 0.012825} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.888037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.888037] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.888037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.888037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.888037] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.889026] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84a1bddf-94bc-48c3-b9c9-a5a4d015243c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.903418] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.903418] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.905175] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b968d72-2dac-4172-80a8-50688199e6fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.912291] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 791.912291] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ffcbc9-626f-4b47-b8b8-75110b0eff29" [ 791.912291] env[61545]: _type = "Task" [ 791.912291] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.922876] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ffcbc9-626f-4b47-b8b8-75110b0eff29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.034166] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b720d4b2-29bc-4e52-8d20-187580c45ccd tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.282s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.058222] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940ca8d2-8e9a-458a-9b06-5b3f3b2b3959 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.070679] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b03cd9-97fb-429a-85c9-f1cc2f370091 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.103465] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a339d403-fa70-44dd-a3f3-f1c080a64f5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.111748] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9da5c23-4c68-49f7-9779-b1d1e911d9b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.127650] env[61545]: DEBUG nova.compute.provider_tree [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.216887] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock 
"a84d7a3d-2f7e-459d-94ca-7caa32b7a472" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.217086] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.262445] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.262776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.427180] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ffcbc9-626f-4b47-b8b8-75110b0eff29, 'name': SearchDatastore_Task, 'duration_secs': 0.041855} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.427180] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8453bbf6-d29c-4a69-83d0-b01fd00c773f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.433927] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 792.433927] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cd02b-3cf0-1045-8c6d-37d802b74d3d" [ 792.433927] env[61545]: _type = "Task" [ 792.433927] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.443496] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cd02b-3cf0-1045-8c6d-37d802b74d3d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.537283] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.631694] env[61545]: DEBUG nova.scheduler.client.report [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.698199] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Updated VIF entry in instance network info cache for port 3ecc6a7f-17b3-4e11-92bd-19f00ab1364e. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.698569] env[61545]: DEBUG nova.network.neutron [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Updating instance_info_cache with network_info: [{"id": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "address": "fa:16:3e:ce:4d:1d", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ecc6a7f-17", "ovs_interfaceid": "3ecc6a7f-17b3-4e11-92bd-19f00ab1364e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.944711] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cd02b-3cf0-1045-8c6d-37d802b74d3d, 'name': SearchDatastore_Task, 'duration_secs': 0.016048} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.945016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.945293] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2/7d2bad05-c461-43b9-9dd0-bdefbd33e3a2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.945585] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b699d0c-5d20-4c35-b80e-6eadb2aecc5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.955288] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 792.955288] env[61545]: value = "task-4255654" [ 792.955288] env[61545]: _type = "Task" [ 792.955288] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.965191] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.974558] env[61545]: DEBUG nova.compute.manager [req-26bb84f2-87dd-4bbc-81fc-ef9e88283b2e req-1a8a248d-59e5-452c-9c81-f1f1011614ea service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Received event network-vif-deleted-2b9f3635-8a28-4d33-be62-134aabc38027 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 792.974745] env[61545]: INFO nova.compute.manager [req-26bb84f2-87dd-4bbc-81fc-ef9e88283b2e req-1a8a248d-59e5-452c-9c81-f1f1011614ea service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Neutron deleted interface 2b9f3635-8a28-4d33-be62-134aabc38027; detaching it from the instance and deleting it from the info cache [ 792.974908] env[61545]: DEBUG nova.network.neutron [req-26bb84f2-87dd-4bbc-81fc-ef9e88283b2e req-1a8a248d-59e5-452c-9c81-f1f1011614ea service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.065506] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.139763] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.140389] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 793.143216] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.572s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.145220] env[61545]: INFO nova.compute.claims [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.201199] env[61545]: DEBUG oslo_concurrency.lockutils [req-75d47981-a4c5-485a-b649-0cf21e5a052f req-8da9ce68-2ec5-4f46-a47f-cbf002fceb69 service nova] Releasing lock "refresh_cache-7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.373029] env[61545]: DEBUG nova.network.neutron [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.465924] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255654, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.479045] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dd1ea8e-edea-46fa-8fb1-4056dc68747b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.490324] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a73a05-d0e6-4cd0-999f-f43cb20e644c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.528211] env[61545]: DEBUG nova.compute.manager [req-26bb84f2-87dd-4bbc-81fc-ef9e88283b2e req-1a8a248d-59e5-452c-9c81-f1f1011614ea service nova] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Detach interface failed, port_id=2b9f3635-8a28-4d33-be62-134aabc38027, reason: Instance 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 793.650476] env[61545]: DEBUG nova.compute.utils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.655029] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.655029] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.714340] env[61545]: DEBUG nova.policy [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7580d3a6f9cf4799af863e85f35b0ea9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33b968c2bbc431686e949fdf795fa76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.877855] env[61545]: INFO nova.compute.manager [-] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Took 2.33 seconds to deallocate network for instance. [ 793.939567] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.939831] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.966886] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580187} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.967196] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2/7d2bad05-c461-43b9-9dd0-bdefbd33e3a2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.967451] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.967732] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9c13bb1-9ea8-44a8-82ec-5ef41d9b54e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.976482] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 793.976482] env[61545]: value = "task-4255655" [ 793.976482] env[61545]: _type = "Task" [ 793.976482] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.985313] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.139206] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Successfully created port: ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.155292] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 794.386214] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.444892] env[61545]: DEBUG nova.compute.utils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 794.490015] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074486} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.490365] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.491260] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7590671-e2d5-4f08-9319-f392896492e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.516043] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2/7d2bad05-c461-43b9-9dd0-bdefbd33e3a2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.516372] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19820a15-0c8e-4dc4-8e83-26cd4963eec4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.539787] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 794.539787] env[61545]: value = "task-4255656" [ 794.539787] env[61545]: _type = "Task" [ 794.539787] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.551877] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255656, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.796023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702ebc4d-08c7-4e6b-a277-88e6172b6591 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.804630] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579696ab-a244-4f20-8ac0-be8fa391f661 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.838409] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6e8949-4e85-4a1a-af17-5f4c17639e6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.847227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7ed0ee-1571-4bf2-98be-5bc5c91cf184 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.861774] env[61545]: DEBUG nova.compute.provider_tree [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.955235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.052217] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.168543] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 795.201035] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 795.201035] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.201280] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 795.201318] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.201436] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 795.201587] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 795.201859] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 795.202074] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 795.202256] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 795.202422] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 795.202598] env[61545]: DEBUG nova.virt.hardware [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 795.203507] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4137d2-dd39-4595-9ed2-fd65b3ee80dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.212951] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad01125-1c9b-4f1f-9543-a50d6a80b26a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.365678] env[61545]: DEBUG nova.scheduler.client.report [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.553846] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255656, 'name': ReconfigVM_Task, 'duration_secs': 0.583116} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.554372] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2/7d2bad05-c461-43b9-9dd0-bdefbd33e3a2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.555187] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a5ad3ce-80b4-4b32-9284-41149bcae201 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.563997] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 795.563997] env[61545]: value = "task-4255657" [ 795.563997] env[61545]: _type = "Task" [ 795.563997] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.574263] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255657, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.848450] env[61545]: DEBUG nova.compute.manager [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Received event network-vif-plugged-ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 795.848724] env[61545]: DEBUG oslo_concurrency.lockutils [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] Acquiring lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.848966] env[61545]: DEBUG oslo_concurrency.lockutils [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.849847] env[61545]: DEBUG oslo_concurrency.lockutils [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.849847] env[61545]: DEBUG nova.compute.manager [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] No waiting events found dispatching 
network-vif-plugged-ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 795.849847] env[61545]: WARNING nova.compute.manager [req-826d1e2c-c637-4f03-b086-7d6a21110f7b req-d93f0630-bde6-4055-b9ef-353d4a47bf7c service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Received unexpected event network-vif-plugged-ab535fb5-b111-46f9-8c40-e9647f50901b for instance with vm_state building and task_state spawning. [ 795.871602] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.872315] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.874950] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.879s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.973177] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Successfully updated port: ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.030354] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.031054] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.031054] env[61545]: INFO nova.compute.manager [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Attaching volume c677bc6f-54cc-40f1-b2ce-631b8412bdab to /dev/sdb [ 796.070352] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2f7f4a-4533-4a4f-b471-d60eb162b0ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.081227] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716b8e86-4154-4ca0-8a7e-3aec19713a91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.084304] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255657, 'name': Rename_Task, 'duration_secs': 0.316465} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.084576] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.085257] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b0d6c0c-9df2-4d2e-a3a4-90fa44922af1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.093511] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 796.093511] env[61545]: value = "task-4255658" [ 796.093511] env[61545]: _type = "Task" [ 796.093511] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.099809] env[61545]: DEBUG nova.virt.block_device [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating existing volume attachment record: 5770064d-3404-41e9-819d-b6643f0779e9 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 796.108559] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.378942] env[61545]: DEBUG nova.compute.utils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.389277] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.389277] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.458248] env[61545]: DEBUG nova.policy [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff4f3a3710024eca8e3b4ab9ea9be625', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ea094c1108b4bcdafa4a1198a796312', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.475359] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.475607] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.475784] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.603363] env[61545]: DEBUG oslo_vmware.api [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255658, 'name': PowerOnVM_Task, 'duration_secs': 0.477741} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.603655] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.603861] env[61545]: INFO nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Took 8.68 seconds to spawn the instance on the hypervisor. 
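Annotation (not part of the captured log): the spawn sequence above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session object, with each vCenter task polled until completion, which is what the wait_for_task / _poll_task "progress is N%" entries show. A minimal Python sketch of that polling pattern follows; the host, credentials and datastore paths are placeholders for illustration, not values taken from this log.

    # Sketch only: poll a vCenter task the way the wait_for_task/_poll_task entries above do.
    # Host, credentials and datastore paths are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    content = session.vim.service_content
    # A real call also passes sourceDatacenter/destDatacenter managed object refs.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        content.virtualDiskManager,
        sourceName='[datastore2] devstack-image-cache_base/<image>/<image>.vmdk',
        destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task blocks, logging progress ("progress is 0% ... 51% ...")
    # until the task reaches the success or error state.
    session.wait_for_task(copy_task)
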
[ 796.604092] env[61545]: DEBUG nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.604950] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b0a36b-cc5d-4e27-8ef0-f670e6a6d90c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.889884] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 796.925028] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 844f01ed-4dae-4e13-9d1c-09a73f413201 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.925461] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8a3ac91d-8949-4745-9161-1a70899c0293 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.925610] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 0554c462-1dc5-4043-94ac-7a3d28ed05e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.925739] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d7ed99e5-3f96-4053-9b9a-a4b7edb1f351 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.925853] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance b2579785-d1a4-48da-ba27-6ee3098578f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.925978] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e8c954ec-de76-4d3e-9a63-6c30523d5b63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.926105] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance eced4107-b99e-479e-b22c-2157320ecf95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.926327] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.926394] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4b29ebc4-d913-447c-bc57-890953cf8d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.926455] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d7e25ea6-7076-4ab2-aed6-fe5232c2665d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.926596] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (7015027d-c4e1-4938-ac31-6e4672774d7e): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 796.926723] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1722d63d-e604-44fe-8198-13e6c5bce016 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.926846] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2a0576f9-d740-4dfa-9783-17eb3987840b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 796.926974] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.927079] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.928242] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8d838d3b-32ad-4bb2-839e-6bd81c363447 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.928242] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5f4d6338-d1af-4e58-9f76-5e95d51e76f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.928242] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.928242] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 16bc91d0-71c3-4bd9-980b-6574c3fd9335 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.928242] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance a127cc27-7155-4a7a-871a-c3e67a99bfc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 796.944417] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Successfully created port: 613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.025413] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.129642] env[61545]: INFO nova.compute.manager [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Took 46.25 seconds to build instance. [ 797.373095] env[61545]: DEBUG nova.network.neutron [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [{"id": "ab535fb5-b111-46f9-8c40-e9647f50901b", "address": "fa:16:3e:f5:f9:73", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab535fb5-b1", "ovs_interfaceid": "ab535fb5-b111-46f9-8c40-e9647f50901b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.431832] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e21de424-8121-4e2f-84c2-8096ba8048cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.632704] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9478079f-0c30-4e5c-a5c1-c165092fb267 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.720s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.876181] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.876530] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Instance network_info: |[{"id": "ab535fb5-b111-46f9-8c40-e9647f50901b", "address": "fa:16:3e:f5:f9:73", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab535fb5-b1", "ovs_interfaceid": "ab535fb5-b111-46f9-8c40-e9647f50901b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 797.877108] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:f9:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0467beaa-08c6-44d6-b8a2-e9c609c21ff4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab535fb5-b111-46f9-8c40-e9647f50901b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.884653] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating folder: Project (c33b968c2bbc431686e949fdf795fa76). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.886113] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff34b818-2af6-48be-9ed7-1cc4e2986330 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.888901] env[61545]: DEBUG nova.compute.manager [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Received event network-changed-ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 797.889106] env[61545]: DEBUG nova.compute.manager [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Refreshing instance network info cache due to event network-changed-ab535fb5-b111-46f9-8c40-e9647f50901b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 797.889332] env[61545]: DEBUG oslo_concurrency.lockutils [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] Acquiring lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.889473] env[61545]: DEBUG oslo_concurrency.lockutils [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] Acquired lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.889632] env[61545]: DEBUG nova.network.neutron [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Refreshing network info cache for port ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.901193] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created folder: Project (c33b968c2bbc431686e949fdf795fa76) in parent group-v838542. [ 797.901513] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating folder: Instances. Parent ref: group-v838639. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.901669] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9f59e94-35f6-41d3-ab61-9832cbdc4a52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.904368] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.916660] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created folder: Instances in parent group-v838639. [ 797.916998] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.917152] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.917375] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f79461a4-644a-445b-bd22-9876e2c111ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.935188] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance fa08b76f-d64d-46e9-9865-1ab2e9b1d823 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.942326] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec 
tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.944081] env[61545]: DEBUG nova.virt.hardware [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.944871] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda33162-f7fd-475f-a361-2e59058c4f4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.950028] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.950028] env[61545]: value = "task-4255664" [ 797.950028] env[61545]: _type = "Task" [ 797.950028] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.957628] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12afa5c-2d8a-440b-b005-505371eec332 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.965880] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255664, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.137542] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.438671] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d517f427-8580-481b-b50f-150da6c571b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.461566] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255664, 'name': CreateVM_Task, 'duration_secs': 0.381395} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.461727] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.462497] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.462594] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.462919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.463511] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b4a573-b6bd-4555-a8bc-20a398513748 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.468866] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa 
tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 798.468866] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522e7086-2c44-bd02-060e-f891bedaaf2d" [ 798.468866] env[61545]: _type = "Task" [ 798.468866] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.478042] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522e7086-2c44-bd02-060e-f891bedaaf2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.675434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.800978] env[61545]: DEBUG nova.network.neutron [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updated VIF entry in instance network info cache for port ab535fb5-b111-46f9-8c40-e9647f50901b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.801370] env[61545]: DEBUG nova.network.neutron [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [{"id": "ab535fb5-b111-46f9-8c40-e9647f50901b", "address": "fa:16:3e:f5:f9:73", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab535fb5-b1", "ovs_interfaceid": "ab535fb5-b111-46f9-8c40-e9647f50901b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.942763] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5b2fb040-a964-479f-ae3f-4f428248d64b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.981020] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522e7086-2c44-bd02-060e-f891bedaaf2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010689} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.981357] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.981611] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.981874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.982030] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.982239] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.982603] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7f52a29-823f-4b3b-bd10-9762d4b20f06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.992030] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.992287] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 
tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.993289] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22f8d2e5-021e-49b3-a008-7fbf85919b18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.999181] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 798.999181] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528fe0c0-411d-531a-a6c7-e7d40ef7e9f7" [ 798.999181] env[61545]: _type = "Task" [ 798.999181] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.008188] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528fe0c0-411d-531a-a6c7-e7d40ef7e9f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.025389] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Successfully updated port: 613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.304837] env[61545]: DEBUG oslo_concurrency.lockutils [req-8dbf57b3-d0d2-41ac-83cc-23d4479bd892 req-f81b6e8d-0eb9-42fa-9f50-f788c2417de6 service nova] Releasing lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.446341] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 609ba431-b42b-4b0d-9c16-06e19bee114c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.509550] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528fe0c0-411d-531a-a6c7-e7d40ef7e9f7, 'name': SearchDatastore_Task, 'duration_secs': 0.0191} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.510417] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50cf7a1c-b550-4833-8ed0-32b073b361eb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.516486] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 799.516486] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52753462-6be0-fc80-e443-cbe709cef8e6" [ 799.516486] env[61545]: _type = "Task" [ 799.516486] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.524470] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52753462-6be0-fc80-e443-cbe709cef8e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.528168] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.528384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquired lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.528606] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.677650] env[61545]: DEBUG nova.compute.manager [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.678880] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b8db55-0db9-4632-9eb8-34c0dd32d87e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.950827] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9cf6dd9e-40e9-4df6-9342-2850e0f93d85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.028915] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52753462-6be0-fc80-e443-cbe709cef8e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009588} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.029200] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.029408] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/16bc91d0-71c3-4bd9-980b-6574c3fd9335.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 800.029772] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b15c648-53f9-4d6e-bda9-128e31196081 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.039219] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 800.039219] env[61545]: value = "task-4255666" [ 800.039219] env[61545]: _type = "Task" [ 800.039219] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.056071] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255666, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.170639] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "b2579785-d1a4-48da-ba27-6ee3098578f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.170953] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.171279] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.171492] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.171666] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.173963] env[61545]: INFO nova.compute.manager [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Terminating instance [ 800.192209] env[61545]: INFO nova.compute.manager [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] instance snapshotting [ 800.195822] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d29aa77-5873-4b9c-be62-6e081f7184a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.230196] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cdfa2a-b711-49ad-8851-113bf8840ee4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.234807] env[61545]: DEBUG 
nova.compute.manager [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Received event network-vif-plugged-613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 800.234807] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Acquiring lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.234972] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.235215] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.235448] env[61545]: DEBUG nova.compute.manager [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] No waiting events found dispatching network-vif-plugged-613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 800.235677] env[61545]: WARNING nova.compute.manager [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Received unexpected event network-vif-plugged-613c36a6-a4a9-4e6d-9252-7f43cd7584ff for instance with vm_state building and task_state spawning. [ 800.235900] env[61545]: DEBUG nova.compute.manager [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Received event network-changed-613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 800.236195] env[61545]: DEBUG nova.compute.manager [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Refreshing instance network info cache due to event network-changed-613c36a6-a4a9-4e6d-9252-7f43cd7584ff. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 800.236402] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Acquiring lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.239054] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.455436] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance ced5bde7-07b9-4d07-8b13-49f6fb006eed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.471054] env[61545]: DEBUG nova.network.neutron [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Updating instance_info_cache with network_info: [{"id": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "address": "fa:16:3e:44:d6:ba", "network": {"id": "e10a7b7a-ef84-4162-83a5-9d11d9b84780", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1156720617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ea094c1108b4bcdafa4a1198a796312", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613c36a6-a4", "ovs_interfaceid": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.549868] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255666, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.678747] env[61545]: DEBUG nova.compute.manager [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 800.678747] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 800.679092] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b60f57b-9020-4727-afd6-134af8a6f2d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.688035] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.688373] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac12edb9-44bd-4122-b3da-d3602a718dc2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.696787] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 800.696787] env[61545]: value = "task-4255667" [ 800.696787] env[61545]: _type = "Task" [ 800.696787] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.706680] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.748118] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 800.748357] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6a0c212f-12fc-4d27-8ecb-e7a6ce818cdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.757694] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 800.757694] env[61545]: value = "task-4255668" [ 800.757694] env[61545]: _type = "Task" [ 800.757694] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.768082] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255668, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.964201] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 6f2a4514-4de9-427d-91be-f445235696bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.973844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Releasing lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.974290] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Instance network_info: |[{"id": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "address": "fa:16:3e:44:d6:ba", "network": {"id": "e10a7b7a-ef84-4162-83a5-9d11d9b84780", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1156720617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ea094c1108b4bcdafa4a1198a796312", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613c36a6-a4", "ovs_interfaceid": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.975062] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Acquired lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.975062] env[61545]: DEBUG nova.network.neutron [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Refreshing network info cache for port 613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.977220] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:d6:ba', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8edfde4-5a99-4745-956d-04da82ab1b85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '613c36a6-a4a9-4e6d-9252-7f43cd7584ff', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.984335] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Creating folder: Project (1ea094c1108b4bcdafa4a1198a796312). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.988562] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abecf19e-69c8-491c-adbd-c46dc6f8fe34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.001731] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Created folder: Project (1ea094c1108b4bcdafa4a1198a796312) in parent group-v838542. [ 801.001986] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Creating folder: Instances. Parent ref: group-v838642. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 801.002306] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97b3eef1-c210-4322-a7d5-f0d0e1b0c11b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.014205] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Created folder: Instances in parent group-v838642. [ 801.014205] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.014435] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 801.014504] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ca8566e-3adf-4faa-8432-a08f4c7b3d04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.037231] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.037231] env[61545]: value = "task-4255671" [ 801.037231] env[61545]: _type = "Task" [ 801.037231] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.050122] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255671, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.054018] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.956106} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.057127] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/16bc91d0-71c3-4bd9-980b-6574c3fd9335.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.057443] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.058080] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3baa1dc9-484c-4d40-af91-82c2f670ec65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.068835] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 801.068835] env[61545]: value = "task-4255672" [ 801.068835] env[61545]: _type = "Task" [ 801.068835] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.082537] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255672, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.150981] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 801.151416] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838638', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'name': 'volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd7e25ea6-7076-4ab2-aed6-fe5232c2665d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'serial': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 801.152797] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec7cecc-0ead-425a-9298-d86670cba9f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.172991] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15879d69-2850-4dcf-a9a3-429122519632 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.198363] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab/volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.202138] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e276e39e-0f90-43ef-ad57-ac4bbf4053b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.223137] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255667, 'name': PowerOffVM_Task, 'duration_secs': 0.421966} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.224590] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.224775] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.225127] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 801.225127] env[61545]: value = "task-4255673" [ 801.225127] env[61545]: _type = "Task" [ 801.225127] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.225418] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c08c528-8853-4b65-8551-ee8ef4009be8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.238311] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255673, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.268541] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255668, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.325570] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.325841] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.326137] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Deleting the datastore file [datastore2] b2579785-d1a4-48da-ba27-6ee3098578f1 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.326719] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fb3052a-251f-43f7-8963-488825a4015e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.332036] env[61545]: DEBUG nova.network.neutron [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Updated VIF entry in instance network info cache for port 613c36a6-a4a9-4e6d-9252-7f43cd7584ff. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 801.332036] env[61545]: DEBUG nova.network.neutron [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Updating instance_info_cache with network_info: [{"id": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "address": "fa:16:3e:44:d6:ba", "network": {"id": "e10a7b7a-ef84-4162-83a5-9d11d9b84780", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1156720617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ea094c1108b4bcdafa4a1198a796312", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613c36a6-a4", "ovs_interfaceid": "613c36a6-a4a9-4e6d-9252-7f43cd7584ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.335415] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for the task: (returnval){ [ 801.335415] env[61545]: value = "task-4255675" [ 801.335415] env[61545]: _type = "Task" [ 801.335415] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.345992] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.468028] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 13db992b-db13-451f-a853-9b7de28b9184 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.547863] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255671, 'name': CreateVM_Task, 'duration_secs': 0.343458} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.548066] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.548781] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.548979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.549351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 801.549675] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dff20b2-c946-4725-9770-c943b9339933 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.555208] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 801.555208] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526c7411-05ec-1b8b-2ac7-4189877afff7" [ 801.555208] env[61545]: _type = "Task" [ 801.555208] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.564106] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526c7411-05ec-1b8b-2ac7-4189877afff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.578097] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075602} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.578385] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.579214] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f655d999-f40c-473b-a9a5-fb09cc5e3aac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.602968] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/16bc91d0-71c3-4bd9-980b-6574c3fd9335.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.603353] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c47c135f-f482-444d-8e8b-fb7de989dc9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.625691] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 801.625691] env[61545]: value = "task-4255676" [ 801.625691] env[61545]: _type = "Task" [ 801.625691] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.635765] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255676, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.737771] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.770900] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255668, 'name': CreateSnapshot_Task, 'duration_secs': 0.792713} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.771250] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 801.772262] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202ad7a4-7305-4cae-a9f0-d3e6501091e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.837386] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b139d3-1f18-4d8d-b323-cdceee623663 req-12878826-2ab1-4143-8e8f-b3d661b298ee service nova] Releasing lock "refresh_cache-a127cc27-7155-4a7a-871a-c3e67a99bfc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.846875] env[61545]: DEBUG oslo_vmware.api [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Task: {'id': task-4255675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18886} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.847178] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.847416] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.847632] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.847814] env[61545]: INFO nova.compute.manager [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Took 1.17 seconds to destroy the instance on the hypervisor. [ 801.848082] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.848288] env[61545]: DEBUG nova.compute.manager [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 801.848387] env[61545]: DEBUG nova.network.neutron [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.972540] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 60edf62d-3fb8-4d85-9a4e-ef71c565d940 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.072403] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526c7411-05ec-1b8b-2ac7-4189877afff7, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.072767] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.073104] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.073259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.073407] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.073597] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.073878] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cc2226c-704c-4db6-908d-a2ce0cd6463d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.085403] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.085600] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.086472] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8e8282d-d535-4e77-b2f2-d5c3a3f4bc9c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.092847] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 802.092847] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5631a-987d-74cc-2fd7-4bd71b885d40" [ 802.092847] env[61545]: _type = "Task" [ 802.092847] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.102789] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5631a-987d-74cc-2fd7-4bd71b885d40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.136153] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.239583] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255673, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.292680] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 802.293500] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-039db241-46e6-49ba-995b-8cea14bb0315 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.305103] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 802.305103] env[61545]: value = "task-4255677" [ 802.305103] env[61545]: _type = "Task" [ 802.305103] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.314837] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255677, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.477520] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 69c59bd5-1f57-4fa2-afab-348e5f57501e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.604296] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5631a-987d-74cc-2fd7-4bd71b885d40, 'name': SearchDatastore_Task, 'duration_secs': 0.037841} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.605188] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-916da62d-3106-4adb-bf68-fa9da960caa2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.611391] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 802.611391] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e4bcf8-ccc6-6cf8-7420-b7df4cc6b71e" [ 802.611391] env[61545]: _type = "Task" [ 802.611391] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.620525] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e4bcf8-ccc6-6cf8-7420-b7df4cc6b71e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.643412] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255676, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.745354] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255673, 'name': ReconfigVM_Task, 'duration_secs': 1.054586} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.745354] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfigured VM instance instance-0000001a to attach disk [datastore1] volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab/volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.753171] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-482b4340-d73c-4f03-8eea-a745c0014362 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.777036] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 802.777036] env[61545]: value = "task-4255678" [ 802.777036] env[61545]: _type = "Task" [ 802.777036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.792683] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255678, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.822050] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255677, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.867799] env[61545]: DEBUG nova.compute.manager [req-8de072c2-f685-4855-9c76-57b0da34c41c req-a6477e73-3cc8-4692-bcf5-128a06b4d21b service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Received event network-vif-deleted-4e58a211-5e27-49a0-a9b2-0cb26978fd99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 802.867799] env[61545]: INFO nova.compute.manager [req-8de072c2-f685-4855-9c76-57b0da34c41c req-a6477e73-3cc8-4692-bcf5-128a06b4d21b service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Neutron deleted interface 4e58a211-5e27-49a0-a9b2-0cb26978fd99; detaching it from the instance and deleting it from the info cache [ 802.867799] env[61545]: DEBUG nova.network.neutron [req-8de072c2-f685-4855-9c76-57b0da34c41c req-a6477e73-3cc8-4692-bcf5-128a06b4d21b service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.981920] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 602bd42d-6afa-4419-8352-73a9daab2fe0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.124030] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e4bcf8-ccc6-6cf8-7420-b7df4cc6b71e, 'name': SearchDatastore_Task, 'duration_secs': 0.034233} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.125155] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.125155] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a127cc27-7155-4a7a-871a-c3e67a99bfc8/a127cc27-7155-4a7a-871a-c3e67a99bfc8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.125337] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b265570-cbfb-4010-bfa5-f74ed2b50664 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.138429] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255676, 'name': ReconfigVM_Task, 'duration_secs': 1.042419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.140054] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/16bc91d0-71c3-4bd9-980b-6574c3fd9335.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.141192] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 803.141192] env[61545]: value = "task-4255679" [ 803.141192] env[61545]: _type = "Task" [ 803.141192] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.141192] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f0c1169-a62e-4d21-951f-0ef632223c4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.154080] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255679, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.154494] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 803.154494] env[61545]: value = "task-4255680" [ 803.154494] env[61545]: _type = "Task" [ 803.154494] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.163585] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255680, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.165021] env[61545]: DEBUG nova.network.neutron [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.287810] env[61545]: DEBUG oslo_vmware.api [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255678, 'name': ReconfigVM_Task, 'duration_secs': 0.172094} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.287810] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838638', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'name': 'volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd7e25ea6-7076-4ab2-aed6-fe5232c2665d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'serial': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 803.317990] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255677, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.370679] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9df48c86-2d47-425c-89c4-ed47b7170be1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.383821] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0149cd25-cc9c-4078-b0c3-03ac5067a065 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.418911] env[61545]: DEBUG nova.compute.manager [req-8de072c2-f685-4855-9c76-57b0da34c41c req-a6477e73-3cc8-4692-bcf5-128a06b4d21b service nova] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Detach interface failed, port_id=4e58a211-5e27-49a0-a9b2-0cb26978fd99, reason: Instance b2579785-d1a4-48da-ba27-6ee3098578f1 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 803.484868] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance ecf98c79-da3d-44be-9c76-c3fccc688235 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.654984] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255679, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47544} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.655378] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a127cc27-7155-4a7a-871a-c3e67a99bfc8/a127cc27-7155-4a7a-871a-c3e67a99bfc8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.655708] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.659129] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25fc29df-0672-48bf-aa0e-9476647c61c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.666894] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255680, 'name': Rename_Task, 'duration_secs': 0.149677} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.668389] env[61545]: INFO nova.compute.manager [-] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Took 1.82 seconds to deallocate network for instance. [ 803.668694] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.669028] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 803.669028] env[61545]: value = "task-4255681" [ 803.669028] env[61545]: _type = "Task" [ 803.669028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.670778] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf767bf7-edf4-4bc8-8167-c5526b0cdc7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.685430] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255681, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.686964] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 803.686964] env[61545]: value = "task-4255682" [ 803.686964] env[61545]: _type = "Task" [ 803.686964] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.696110] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255682, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.822232] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255677, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.988312] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance dad53420-37f1-42ef-b0d3-e35c73b97417 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.180881] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.184424] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255681, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083356} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.184716] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.185627] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1741d8-b0d8-4ba1-b460-a41d591ccaac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.198344] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255682, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.217896] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] a127cc27-7155-4a7a-871a-c3e67a99bfc8/a127cc27-7155-4a7a-871a-c3e67a99bfc8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.218267] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e983054d-e909-4ee6-b033-9a929470395c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.241551] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 804.241551] env[61545]: value = "task-4255683" [ 804.241551] env[61545]: _type = "Task" [ 804.241551] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.256425] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255683, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.318902] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255677, 'name': CloneVM_Task, 'duration_secs': 1.798876} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.319202] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Created linked-clone VM from snapshot [ 804.319985] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485a5073-ded1-4d99-8bcc-f8af956b855d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.329059] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Uploading image c11d951d-cb35-46c8-85de-6e5c84aeceb6 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 804.338348] env[61545]: DEBUG nova.objects.instance [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lazy-loading 'flavor' on Instance uuid d7e25ea6-7076-4ab2-aed6-fe5232c2665d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 804.358185] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 804.358185] env[61545]: value = "vm-838646" [ 804.358185] env[61545]: _type = "VirtualMachine" [ 804.358185] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 804.358787] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1b79400b-3d31-45dd-b882-729b6a7bccb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.368758] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease: (returnval){ [ 804.368758] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ced2c6-baf2-e447-9e4f-d4bea6589aeb" [ 804.368758] env[61545]: _type = "HttpNfcLease" [ 804.368758] env[61545]: } obtained for exporting VM: (result){ [ 804.368758] env[61545]: value = "vm-838646" [ 804.368758] env[61545]: _type = "VirtualMachine" [ 804.368758] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 804.369084] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the lease: (returnval){ [ 804.369084] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ced2c6-baf2-e447-9e4f-d4bea6589aeb" [ 804.369084] env[61545]: _type = "HttpNfcLease" [ 804.369084] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 804.377813] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 804.377813] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ced2c6-baf2-e447-9e4f-d4bea6589aeb" [ 804.377813] env[61545]: _type = "HttpNfcLease" [ 804.377813] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 804.492220] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e3742aa7-0b26-41f5-b8c0-9388ef2b7e74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.701612] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255682, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.758512] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255683, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.834188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "56680678-c844-4dd2-8541-d50de83b22d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.834586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "56680678-c844-4dd2-8541-d50de83b22d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.844753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7fa23faa-2d4e-4aca-81ee-479c34c55a2d tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.814s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.881589] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 804.881589] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ced2c6-baf2-e447-9e4f-d4bea6589aeb" [ 804.881589] env[61545]: _type = "HttpNfcLease" [ 804.881589] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 804.881885] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 804.881885] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ced2c6-baf2-e447-9e4f-d4bea6589aeb" [ 804.881885] env[61545]: _type = "HttpNfcLease" [ 804.881885] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 804.882673] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfda5c31-9420-40fb-995a-c46448ee8150 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.892777] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 804.892970] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 804.996400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 62301196-fb8a-45fe-9193-0ad8f7126ab5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.059582] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3ca5136d-22e0-45b5-a2ac-229e68be4b74 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.199257] env[61545]: DEBUG oslo_vmware.api [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255682, 'name': PowerOnVM_Task, 'duration_secs': 1.077425} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.199521] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.199714] env[61545]: INFO nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Took 10.03 seconds to spawn the instance on the hypervisor. [ 805.199884] env[61545]: DEBUG nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.200694] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8574c3fe-67eb-4afd-9d14-3dddced25eaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.254490] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255683, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.300915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.304049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.499033] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9b62358e-c834-461c-9954-49f513b0f4ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.725108] env[61545]: INFO nova.compute.manager [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Took 51.19 seconds to build instance. [ 805.756200] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255683, 'name': ReconfigVM_Task, 'duration_secs': 1.138374} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.756600] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Reconfigured VM instance instance-00000024 to attach disk [datastore2] a127cc27-7155-4a7a-871a-c3e67a99bfc8/a127cc27-7155-4a7a-871a-c3e67a99bfc8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.757271] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0934e790-a545-4abb-99e0-737d0a044b89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.764704] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 805.764704] env[61545]: value = "task-4255685" [ 805.764704] env[61545]: _type = "Task" [ 805.764704] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.774932] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255685, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.806939] env[61545]: INFO nova.compute.manager [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Detaching volume c677bc6f-54cc-40f1-b2ce-631b8412bdab [ 805.851680] env[61545]: INFO nova.virt.block_device [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Attempting to driver detach volume c677bc6f-54cc-40f1-b2ce-631b8412bdab from mountpoint /dev/sdb [ 805.852222] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 805.852689] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838638', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'name': 'volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd7e25ea6-7076-4ab2-aed6-fe5232c2665d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'serial': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 805.853698] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdcea66-6753-4620-a0f5-d37ee284c9cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.878414] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8e2e1e-2eb6-4c63-9116-ae4d0fc6b53e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.887542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434e68e1-59a2-4bb3-9e6f-806517d9b408 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.915135] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cb8599-0781-4d22-ad01-b23f6e540c55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.934446] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] The volume has not been displaced from its original location: [datastore1] volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab/volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 805.940766] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfiguring VM instance instance-0000001a to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 805.941260] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3605eba6-86c8-4905-840f-5fe22c401095 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.963260] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 805.963260] env[61545]: value = "task-4255686" [ 805.963260] env[61545]: _type = "Task" [ 805.963260] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.974160] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255686, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.002306] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.226918] env[61545]: DEBUG oslo_concurrency.lockutils [None req-afdcdcfc-1598-4b49-bed9-450c0db8b7fa tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.836s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.280652] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255685, 'name': Rename_Task, 'duration_secs': 0.273962} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.280999] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.281304] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74955f8b-4747-4b1c-b18c-b8e51690683e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.291348] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 806.291348] env[61545]: value = "task-4255687" [ 806.291348] env[61545]: _type = "Task" [ 806.291348] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.304050] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.475909] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255686, 'name': ReconfigVM_Task, 'duration_secs': 0.334076} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.476672] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Reconfigured VM instance instance-0000001a to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 806.482287] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21b329a3-5071-4860-b82c-fec3f5cd051b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.500879] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 806.500879] env[61545]: value = "task-4255688" [ 806.500879] env[61545]: _type = "Task" [ 806.500879] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.506062] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.514659] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255688, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.730058] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.803818] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255687, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.013572] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f7a16153-2ef7-4be4-90a2-5ad6616203f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.015563] env[61545]: DEBUG oslo_vmware.api [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255688, 'name': ReconfigVM_Task, 'duration_secs': 0.174184} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.016313] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838638', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'name': 'volume-c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd7e25ea6-7076-4ab2-aed6-fe5232c2665d', 'attached_at': '', 'detached_at': '', 'volume_id': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab', 'serial': 'c677bc6f-54cc-40f1-b2ce-631b8412bdab'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 807.256415] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.301825] env[61545]: DEBUG oslo_vmware.api [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255687, 'name': PowerOnVM_Task, 'duration_secs': 0.591959} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.302131] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.302379] env[61545]: INFO nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Took 9.40 seconds to spawn the instance on the hypervisor. [ 807.302564] env[61545]: DEBUG nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.303371] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7532602-596f-4880-8fd0-7bc2fa51342e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.517597] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance a84d7a3d-2f7e-459d-94ca-7caa32b7a472 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.587109] env[61545]: DEBUG nova.objects.instance [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lazy-loading 'flavor' on Instance uuid d7e25ea6-7076-4ab2-aed6-fe5232c2665d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.823065] env[61545]: INFO nova.compute.manager [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Took 50.28 seconds to build instance. [ 808.023618] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance bea2e59c-02fd-4d6d-8f10-b0e265fa87a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.023938] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 808.024157] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=250GB used_disk=14GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 808.324738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-15ed074c-f131-40fa-a3bf-e0f3b6bb19ec tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.778s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.607449] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e5a0f1e-823f-4a36-9fff-214194a3ed6e tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.306s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.663268] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7f3208-7a4f-4c1d-9cd5-d2ea85180b31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.671776] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a511568b-504f-45ab-ac52-abfc5f4b08db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.704750] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8c1554f9-b997-4d51-9dd7-42c0191fdc29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.714381] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959116cc-f857-4f2a-a45e-a5cc7a73736f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.729072] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.798753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.798753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.798945] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.799046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.799225] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.801457] env[61545]: INFO nova.compute.manager [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Terminating instance [ 808.829271] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] 
Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.233166] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.306235] env[61545]: DEBUG nova.compute.manager [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 809.306505] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.307523] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb857dfb-366b-4b17-ae3b-d5cbafe1ee78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.316607] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.316957] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be749222-2a8e-4c08-b8c2-30d5e4b2012c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.324396] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 809.324396] env[61545]: value = "task-4255689" [ 809.324396] env[61545]: _type = "Task" [ 809.324396] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.336130] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "1537dbf0-d1b6-410f-8333-788761dd24d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.336380] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.338705] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.353409] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.741873] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 809.742222] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.867s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.743022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.408s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.744202] env[61545]: INFO nova.compute.claims [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.835610] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255689, 'name': PowerOffVM_Task, 'duration_secs': 0.412607} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.835945] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.836230] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 809.836547] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-303213d9-4e14-4334-b34a-3c3d923ce08d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.906801] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 809.907101] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 809.907295] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Deleting the datastore file [datastore2] a127cc27-7155-4a7a-871a-c3e67a99bfc8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.907634] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e0d1175-a764-4c0a-92aa-7c0023181754 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.915285] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for the task: (returnval){ [ 809.915285] env[61545]: value = "task-4255691" [ 809.915285] env[61545]: _type = "Task" [ 809.915285] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.923496] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.425320] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255691, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.929807] env[61545]: DEBUG oslo_vmware.api [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Task: {'id': task-4255691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.748751} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.929807] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.929807] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.929807] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.929807] env[61545]: INFO nova.compute.manager [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Took 1.62 seconds to destroy the instance on the hypervisor. [ 810.929807] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.929807] env[61545]: DEBUG nova.compute.manager [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.929807] env[61545]: DEBUG nova.network.neutron [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.411229] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f029127-5d4e-477a-91b4-b1ab223c1c93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.419668] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabc5a9b-d506-4533-8e27-b2664f4598fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.451787] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a95a0f-faac-4440-94b5-92ff5d62a192 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.460518] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e51c6b-181b-49f9-a6f5-7539fd30ce7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.475306] env[61545]: DEBUG nova.compute.provider_tree [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.560818] env[61545]: DEBUG nova.compute.manager [req-ee9b0fea-d20e-4d04-b73f-d19e15b7bc50 req-f3a17cce-59a9-4ce2-99e4-3248ff768878 service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Received event network-vif-deleted-613c36a6-a4a9-4e6d-9252-7f43cd7584ff {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 811.560903] env[61545]: INFO nova.compute.manager [req-ee9b0fea-d20e-4d04-b73f-d19e15b7bc50 req-f3a17cce-59a9-4ce2-99e4-3248ff768878 service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Neutron deleted interface 613c36a6-a4a9-4e6d-9252-7f43cd7584ff; detaching it from the instance and deleting it from the info cache [ 811.561201] env[61545]: DEBUG nova.network.neutron [req-ee9b0fea-d20e-4d04-b73f-d19e15b7bc50 req-f3a17cce-59a9-4ce2-99e4-3248ff768878 service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.892409] env[61545]: DEBUG nova.network.neutron [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.978599] env[61545]: DEBUG nova.scheduler.client.report [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has 
not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 812.064263] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09b13a2e-b9da-4a24-a812-23201a997a14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.075367] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591cccaf-0684-4a28-ad2c-f4f29bb42fbb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.108107] env[61545]: DEBUG nova.compute.manager [req-ee9b0fea-d20e-4d04-b73f-d19e15b7bc50 req-f3a17cce-59a9-4ce2-99e4-3248ff768878 service nova] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Detach interface failed, port_id=613c36a6-a4a9-4e6d-9252-7f43cd7584ff, reason: Instance a127cc27-7155-4a7a-871a-c3e67a99bfc8 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 812.395921] env[61545]: INFO nova.compute.manager [-] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Took 1.47 seconds to deallocate network for instance. [ 812.484365] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.741s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.484826] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 812.487501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.231s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.489318] env[61545]: INFO nova.compute.claims [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.903778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.996844] env[61545]: DEBUG nova.compute.utils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 812.998314] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 812.998482] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 813.051838] env[61545]: DEBUG nova.policy [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7580d3a6f9cf4799af863e85f35b0ea9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33b968c2bbc431686e949fdf795fa76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 813.369111] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Successfully created port: bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 813.502681] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.150708] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c0b474-418c-4845-bb67-4f364302090c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.159560] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15bae86-f334-44ff-ad6b-01e4501f292d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.190423] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f678af-e5bc-49f0-ae9e-9c40e2f5553d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.198650] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f1d586-1dc6-4223-80d2-ed93efb878a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.212640] env[61545]: DEBUG nova.compute.provider_tree [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.514977] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 814.544629] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 814.545097] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.545097] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 814.545203] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.545362] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 814.545566] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 814.545782] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 814.545942] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 814.546144] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 814.546315] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 814.546494] env[61545]: DEBUG nova.virt.hardware [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 814.547417] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac76021d-c63e-4209-bed7-7ed56f1ca59a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.555996] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe14d73-5cab-44f3-9d85-c67e852411c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.716433] env[61545]: DEBUG nova.scheduler.client.report [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.081461] env[61545]: DEBUG nova.compute.manager [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Received event network-vif-plugged-bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 815.081685] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] Acquiring lock "e21de424-8121-4e2f-84c2-8096ba8048cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.081893] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.083431] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.083682] env[61545]: DEBUG nova.compute.manager [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] No waiting events found dispatching network-vif-plugged-bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 815.083865] env[61545]: WARNING nova.compute.manager [req-9ba8e131-b7f1-443e-b535-c679bf2cffe0 req-fcbb4476-485e-49a0-bbba-a3d6f7a9163a service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Received unexpected event network-vif-plugged-bb1a376a-94fe-4856-ab21-de3f98dda10e for instance with vm_state building and task_state spawning. [ 815.112688] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 815.113760] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ab1324-fecd-475e-97d6-b641c90f1694 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.123910] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 815.124114] env[61545]: ERROR oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk due to incomplete transfer. [ 815.124368] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d1cbcc81-844c-4e91-a65a-8942abaaf283 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.132356] env[61545]: DEBUG oslo_vmware.rw_handles [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52216fd4-5236-d50b-38f1-90433c620ac7/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 815.132601] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Uploaded image c11d951d-cb35-46c8-85de-6e5c84aeceb6 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 815.134980] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 815.135285] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f17beaf1-819f-463d-a57a-bbd89e85a3e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.142051] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 815.142051] env[61545]: value = "task-4255692" [ 815.142051] env[61545]: _type = "Task" [ 815.142051] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.150899] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255692, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.177852] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Successfully updated port: bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.222121] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.222706] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 815.226144] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.909s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.227467] env[61545]: INFO nova.compute.claims [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.652841] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255692, 'name': Destroy_Task, 'duration_secs': 0.299304} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.653167] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Destroyed the VM [ 815.653423] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 815.653693] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-317b4f0b-16a2-402d-87f6-c9f4ea9c45e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.660924] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 815.660924] env[61545]: value = "task-4255693" [ 815.660924] env[61545]: _type = "Task" [ 815.660924] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.669466] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255693, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.681684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.681684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.681862] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.732866] env[61545]: DEBUG nova.compute.utils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 815.736594] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 815.736808] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 815.801659] env[61545]: DEBUG nova.policy [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab62fe0e535247abaec82034763a679d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b77b701ec241d9ab2ccfa6a15dbf26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 816.172537] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255693, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.238707] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 816.251711] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.445988] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Successfully created port: 5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.614852] env[61545]: DEBUG nova.network.neutron [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Updating instance_info_cache with network_info: [{"id": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "address": "fa:16:3e:3e:16:dc", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1a376a-94", "ovs_interfaceid": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.673202] env[61545]: DEBUG oslo_vmware.api [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255693, 'name': RemoveSnapshot_Task, 'duration_secs': 0.570002} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.675867] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 816.676111] env[61545]: INFO nova.compute.manager [None req-645b75f6-a35a-4545-93fb-b59a6d7e35c9 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 16.48 seconds to snapshot the instance on the hypervisor. [ 816.930917] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebb2714-0614-4044-8083-e2ac622e7573 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.939785] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d443d6dd-36a5-46a8-9dfa-35f05a028a9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.972069] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8b118f-5b44-4963-a38b-ee9fc9b18c30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.981274] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1922b24-17eb-437a-bcfd-c08767f4e940 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.996515] env[61545]: DEBUG nova.compute.provider_tree [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.109411] env[61545]: DEBUG nova.compute.manager [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Received event network-changed-bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 817.109612] env[61545]: DEBUG nova.compute.manager [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Refreshing instance network info cache due to event network-changed-bb1a376a-94fe-4856-ab21-de3f98dda10e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 817.109801] env[61545]: DEBUG oslo_concurrency.lockutils [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] Acquiring lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.120413] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.120413] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Instance network_info: |[{"id": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "address": "fa:16:3e:3e:16:dc", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1a376a-94", "ovs_interfaceid": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 817.120619] env[61545]: DEBUG oslo_concurrency.lockutils [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] Acquired lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.120619] env[61545]: DEBUG nova.network.neutron [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Refreshing network info cache for port bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 817.121799] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:16:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0467beaa-08c6-44d6-b8a2-e9c609c21ff4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'bb1a376a-94fe-4856-ab21-de3f98dda10e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.129568] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.132671] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 817.133126] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8068389d-95ba-404b-b21b-3bbb566aff34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.153722] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.153722] env[61545]: value = "task-4255694" [ 817.153722] env[61545]: _type = "Task" [ 817.153722] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.162067] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255694, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.252374] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 817.278684] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.278930] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.279110] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.279300] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.279450] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.279598] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.279809] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.279968] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 817.280172] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.280343] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.280516] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.283580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c7223a-78ae-4bae-90dc-e4ee9c6445fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.294050] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8052a3db-7158-4643-8676-9bd86e066a36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.448037] env[61545]: DEBUG nova.network.neutron [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Updated VIF entry in instance network info cache for port bb1a376a-94fe-4856-ab21-de3f98dda10e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.448488] env[61545]: DEBUG nova.network.neutron [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Updating instance_info_cache with network_info: [{"id": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "address": "fa:16:3e:3e:16:dc", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1a376a-94", "ovs_interfaceid": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.499872] env[61545]: DEBUG nova.scheduler.client.report [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.665027] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255694, 'name': CreateVM_Task, 'duration_secs': 0.347346} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.665027] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 817.665580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.665751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.666093] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 817.666390] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4014c69b-54ba-493e-9456-23aa528d72fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.672026] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 817.672026] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526fa17e-72e7-066e-47c1-b856e74f0f71" [ 817.672026] env[61545]: _type = "Task" [ 817.672026] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.681329] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526fa17e-72e7-066e-47c1-b856e74f0f71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.951624] env[61545]: DEBUG oslo_concurrency.lockutils [req-25e610d9-52c6-4c3e-8c91-ddd099d0e34b req-d54a56d1-c5f8-4332-a327-03623938d727 service nova] Releasing lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.005048] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.006055] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.008583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.658s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.008864] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.011115] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.619s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.013037] env[61545]: INFO nova.compute.claims [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.050641] env[61545]: INFO nova.scheduler.client.report [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleted allocations for instance 5a284df5-88ea-43bf-9944-ef344f99591c [ 818.183483] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526fa17e-72e7-066e-47c1-b856e74f0f71, 'name': SearchDatastore_Task, 'duration_secs': 
0.012176} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.183813] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.184069] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.184324] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.184474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.184648] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 818.184916] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98008121-db20-4766-99b0-73c15518943d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.195215] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 818.195394] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 818.196192] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ee0f162-3c09-40eb-ba58-bd4907ec425f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.202390] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 818.202390] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d894e-d3b1-591b-1093-ef6da313f8b9" [ 818.202390] env[61545]: _type = "Task" [ 818.202390] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.211216] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d894e-d3b1-591b-1093-ef6da313f8b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.343032] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Successfully updated port: 5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.517609] env[61545]: DEBUG nova.compute.utils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 818.522181] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 818.522340] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.558717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a9f20e51-97ad-4531-b296-8559d61fcee1 tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.752s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.559752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 40.707s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.559975] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.560215] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.560746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.562678] env[61545]: INFO nova.compute.manager [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Terminating instance [ 818.608251] env[61545]: DEBUG nova.policy [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab62fe0e535247abaec82034763a679d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 
'project_id': '01b77b701ec241d9ab2ccfa6a15dbf26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 818.718568] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d894e-d3b1-591b-1093-ef6da313f8b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.719616] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b1ce631-195a-40f2-9e85-872507c48754 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.725101] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 818.725101] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520d9a8d-67fc-f951-4409-b41b6151dea9" [ 818.725101] env[61545]: _type = "Task" [ 818.725101] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.733547] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520d9a8d-67fc-f951-4409-b41b6151dea9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.768971] env[61545]: DEBUG nova.compute.manager [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.769853] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cccae18-c199-4ee6-9490-42c390df0f4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.845436] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.845635] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.845748] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.025455] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.066348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.066421] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquired lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.066565] env[61545]: DEBUG nova.network.neutron [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.154856] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Successfully created port: 70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.236441] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520d9a8d-67fc-f951-4409-b41b6151dea9, 'name': SearchDatastore_Task, 'duration_secs': 0.010427} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.236743] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.237111] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/e21de424-8121-4e2f-84c2-8096ba8048cc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.237289] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0612859b-1483-41bd-9240-26b6a3d0de2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.246687] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 819.246687] env[61545]: value = "task-4255695" [ 819.246687] env[61545]: _type = "Task" [ 819.246687] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.254810] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.282817] env[61545]: INFO nova.compute.manager [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] instance snapshotting [ 819.287073] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8789b526-29c2-4c6c-801f-f452577978a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.307542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ee8ac2-799c-4fda-b8a2-f42c97b902c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.406784] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.462187] env[61545]: DEBUG nova.compute.manager [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Received event network-vif-plugged-5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 819.462419] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Acquiring lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.462630] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.462831] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.463027] env[61545]: DEBUG nova.compute.manager [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] No waiting events found dispatching network-vif-plugged-5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 819.463197] env[61545]: WARNING nova.compute.manager [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Received unexpected event network-vif-plugged-5d92ef0a-2647-43af-b441-58b6137c730e for instance with vm_state building and task_state spawning. [ 819.463373] env[61545]: DEBUG nova.compute.manager [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Received event network-changed-5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 819.463537] env[61545]: DEBUG nova.compute.manager [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Refreshing instance network info cache due to event network-changed-5d92ef0a-2647-43af-b441-58b6137c730e. 
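[editor's note] The req-ef4f736e-... entries above show the compute manager receiving network-vif-plugged / network-changed events from Neutron, taking the per-instance "-events" lock, and logging the vif-plugged event as unexpected because nothing was waiting for it. A simplified sketch of that register/pop pattern follows (hypothetical class and method names, plain threading instead of eventlet):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy version of the waiter/pop pattern visible in the log."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            # Called by the code path that expects the event (e.g. while plugging VIFs).
            with self._lock:
                ev = threading.Event()
                self._waiters[instance_uuid][event_name] = ev
                return ev

        def pop_and_signal(self, instance_uuid, event_name):
            # Called when an external event arrives from Neutron.
            with self._lock:
                ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is None:
                print("Received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
            else:
                ev.set()

    events = InstanceEvents()
    events.pop_and_signal('fa08b76f-d64d-46e9-9865-1ab2e9b1d823',
                          'network-vif-plugged-5d92ef0a')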
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 819.463707] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Acquiring lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.569645] env[61545]: DEBUG nova.compute.utils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Can not refresh info_cache because instance was not found {{(pid=61545) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 819.714646] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6c93b9-2c2a-4752-8b62-25b6d07b4ca7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.723873] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90cd71c-0dfe-4a6a-bdc3-1e8dabbe2d26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.766514] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682a3a0b-32aa-41c6-bc09-502d8449c5e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.779507] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7105d557-5b51-4c8b-82db-2845d5eaa56f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.784090] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255695, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.796357] env[61545]: DEBUG nova.compute.provider_tree [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.819718] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 819.820336] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-df9fa6f6-ee5d-4d98-90fe-105bac5b3a78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.831514] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 819.831514] env[61545]: value = "task-4255696" [ 819.831514] env[61545]: _type = "Task" [ 819.831514] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.836616] env[61545]: DEBUG nova.network.neutron [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.848776] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255696, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.041819] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.057576] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.057771] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.058043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.058313] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.058411] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.067560] env[61545]: INFO nova.compute.manager [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Terminating instance [ 820.076472] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.076609] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.076819] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.077078] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.077299] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.077540] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.077781] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.077941] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.078123] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.078288] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.082081] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.083993] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6398048-cbbf-473b-ae73-0e6edaea3183 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.090290] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Updating instance_info_cache with network_info: [{"id": "5d92ef0a-2647-43af-b441-58b6137c730e", "address": "fa:16:3e:39:ba:1e", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d92ef0a-26", "ovs_interfaceid": "5d92ef0a-2647-43af-b441-58b6137c730e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.096225] env[61545]: DEBUG nova.network.neutron [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.097233] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6bd613-6454-4f68-82e1-d5e35378b2a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.273777] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649837} completed successfully. 
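[editor's note] The hardware.py entries above walk through topology selection for the 1-vCPU m1.nano flavor: with no flavor or image preferences, the limits default to 65536 sockets/cores/threads, exactly one factorization of 1 vCPU exists, and the sorted result is cores=1, sockets=1, threads=1. A simplified stand-in for that enumeration (not nova's exact code in nova/virt/hardware.py) is:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) whose product equals the vCPU count."""
        # No factor can exceed the vCPU count, so huge limits (65536) do not
        # enlarge the search space.
        topologies = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]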
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.274059] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/e21de424-8121-4e2f-84c2-8096ba8048cc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.274280] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.274814] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9f846d7-6959-4caa-82c5-23f37201e726 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.282332] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 820.282332] env[61545]: value = "task-4255697" [ 820.282332] env[61545]: _type = "Task" [ 820.282332] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.294109] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.300580] env[61545]: DEBUG nova.scheduler.client.report [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.342574] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255696, 'name': CreateSnapshot_Task, 'duration_secs': 0.49904} completed successfully. 
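[editor's note] The scheduler report client above confirms the provider's inventory is unchanged: 48 VCPU at allocation_ratio 4.0, 196590 MB RAM with 512 MB reserved, and 450 GB disk. Using the usual (total - reserved) * allocation_ratio rule, that corresponds to the consumable capacity computed below; treat this as a worked example, not a quote of the placement service's internals.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 450,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %.0f consumable units' % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 450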
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.342831] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 820.343618] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583c65d7-f5c0-4c68-8b11-c419c7c2de36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.590166] env[61545]: DEBUG nova.compute.manager [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 820.590426] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.591416] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ed9c09-3147-441a-9de8-86379e5933eb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.594723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.595048] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Instance network_info: |[{"id": "5d92ef0a-2647-43af-b441-58b6137c730e", "address": "fa:16:3e:39:ba:1e", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d92ef0a-26", "ovs_interfaceid": "5d92ef0a-2647-43af-b441-58b6137c730e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.595686] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Acquired lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.595872] env[61545]: DEBUG nova.network.neutron [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Refreshing network info cache for port 5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.597520] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:ba:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13af9422-d668-4413-b63a-766558d83a3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d92ef0a-2647-43af-b441-58b6137c730e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.605328] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Creating folder: Project (01b77b701ec241d9ab2ccfa6a15dbf26). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.609351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Releasing lock "refresh_cache-5a284df5-88ea-43bf-9944-ef344f99591c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.609725] env[61545]: DEBUG nova.compute.manager [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 820.609915] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.610203] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae9a0e44-9f10-4f3b-a24a-08ad585c052c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.614493] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c50ea80b-4165-4e3f-9ea7-391d8f399ef8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.616684] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 820.617266] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6095211b-9f3c-40b6-9acd-57036734bed1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.626036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055c6631-7048-4286-ba58-26ca1f632a9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.640735] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Created folder: Project (01b77b701ec241d9ab2ccfa6a15dbf26) in parent group-v838542. [ 820.641094] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Creating folder: Instances. Parent ref: group-v838649. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.641513] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 820.641513] env[61545]: value = "task-4255699" [ 820.641513] env[61545]: _type = "Task" [ 820.641513] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.642260] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba643c14-b3e6-44bb-9680-ab554c060430 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.658788] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255699, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.672483] env[61545]: WARNING nova.virt.vmwareapi.vmops [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a284df5-88ea-43bf-9944-ef344f99591c could not be found. [ 820.672731] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.672913] env[61545]: INFO nova.compute.manager [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 820.673184] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.675750] env[61545]: DEBUG nova.compute.manager [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 820.675856] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.709705] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.792906] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098223} completed successfully. 
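[editor's note] For instance 5a284df5-... the destroy path above finds no VM on the backend, logs the InstanceNotFound as a warning, and still reports "Instance destroyed" so network deallocation and database cleanup can proceed. A minimal sketch of that tolerant teardown, with a hypothetical backend object standing in for the vmwareapi driver:

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(instance_uuid, backend):
        """Tear down a VM, treating 'already gone' the same as success (sketch)."""
        try:
            vm_ref = backend.lookup_vm(instance_uuid)
            backend.power_off(vm_ref)
            backend.unregister(vm_ref)
            backend.delete_datastore_files(vm_ref)
        except InstanceNotFound:
            # Nothing left on the hypervisor; warn and fall through so the caller
            # can still deallocate networking and update the database.
            print('Instance %s does not exist on backend' % instance_uuid)
        print('Instance destroyed')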
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.796220] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.798655] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53cab3f-8957-45ac-a586-8e47927ef6a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.801976] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Created folder: Instances in parent group-v838649. [ 820.802261] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.802488] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.803534] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af2b68af-0552-40e5-a90f-154f898978fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.828790] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.829366] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 820.841783] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/e21de424-8121-4e2f-84c2-8096ba8048cc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.845256] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.306s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.845465] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.847587] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.065s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.849171] env[61545]: INFO nova.compute.claims [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.852591] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98e576c3-f340-4f88-b255-51613dbcbc53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.876864] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 820.877641] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.877641] env[61545]: value = "task-4255701" [ 820.877641] env[61545]: _type = "Task" [ 820.877641] env[61545]: } to complete. 
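[editor's note] The resource-tracker entries above serialize claims on the "compute_resources" lock (note the 37-38 s waits while other builds held it) before recording a successful claim for instance 609ba431-... on node domain-c8. A toy version of claiming against tracked usage under a single lock (hypothetical structure, not nova's ResourceTracker):

    import threading

    class ResourceTracker:
        """Toy claim logic guarded by one 'compute_resources' lock."""

        def __init__(self, capacity):
            self._lock = threading.Lock()
            self.capacity = dict(capacity)   # e.g. {'VCPU': 192, 'MEMORY_MB': 196078}
            self.used = {rc: 0 for rc in capacity}

        def instance_claim(self, requested):
            with self._lock:                 # the "compute_resources" lock in the log
                for rc, amount in requested.items():
                    if self.used[rc] + amount > self.capacity[rc]:
                        raise RuntimeError('claim failed for %s' % rc)
                for rc, amount in requested.items():
                    self.used[rc] += amount
                return True

    rt = ResourceTracker({'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 450})
    print(rt.instance_claim({'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}))  # True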
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.880180] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4245051a-45d1-4b51-b68c-4fc48cad28a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.887861] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 820.887861] env[61545]: value = "task-4255702" [ 820.887861] env[61545]: _type = "Task" [ 820.887861] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.896678] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 820.896678] env[61545]: value = "task-4255703" [ 820.896678] env[61545]: _type = "Task" [ 820.896678] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.896975] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255701, 'name': CreateVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.906966] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255702, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.911199] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255703, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.911584] env[61545]: INFO nova.scheduler.client.report [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Deleted allocations for instance 1722d63d-e604-44fe-8198-13e6c5bce016 [ 821.002607] env[61545]: DEBUG nova.network.neutron [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Updated VIF entry in instance network info cache for port 5d92ef0a-2647-43af-b441-58b6137c730e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.003037] env[61545]: DEBUG nova.network.neutron [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Updating instance_info_cache with network_info: [{"id": "5d92ef0a-2647-43af-b441-58b6137c730e", "address": "fa:16:3e:39:ba:1e", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d92ef0a-26", "ovs_interfaceid": "5d92ef0a-2647-43af-b441-58b6137c730e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.156723] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255699, 'name': PowerOffVM_Task, 'duration_secs': 0.219119} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.157149] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 821.157372] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 821.157655] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43c8bebb-1482-466f-9e26-01fe9ce3ebd4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.212229] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.241079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.241079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.241079] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleting the datastore file [datastore2] d7ed99e5-3f96-4053-9b9a-a4b7edb1f351 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.241079] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-237b891e-6de3-46a3-924e-aaf55d2f53ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.247903] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for the task: (returnval){ [ 821.247903] env[61545]: value = "task-4255705" [ 821.247903] env[61545]: _type = "Task" [ 821.247903] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.258688] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255705, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.348408] env[61545]: DEBUG nova.compute.utils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.350480] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.351084] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.395026] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255701, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.410531] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255702, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.413819] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255703, 'name': CloneVM_Task} progress is 94%. 
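[editor's note] The "Using /dev/sd instead of None" line above is compute.utils.get_next_device_name falling back to the /dev/sd prefix when the block-device request supplies none. A simplified stand-in for that choice (nova's real implementation handles more cases) is:

    import string

    def get_next_device_name(existing, prefix=None):
        """Pick the first unused <prefix><letter> device name (simplified sketch)."""
        prefix = prefix or '/dev/sd'
        used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
        for letter in string.ascii_lowercase:
            if letter not in used:
                return prefix + letter
        raise ValueError('no free device names under %s' % prefix)

    print(get_next_device_name(['/dev/sda', '/dev/sdb']))  # /dev/sdc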
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.419400] env[61545]: DEBUG oslo_concurrency.lockutils [None req-277e2812-d2ec-470a-852c-f3c2b5eb7ad1 tempest-ServerMetadataNegativeTestJSON-2088309652 tempest-ServerMetadataNegativeTestJSON-2088309652-project-member] Lock "1722d63d-e604-44fe-8198-13e6c5bce016" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.922s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.499300] env[61545]: DEBUG nova.policy [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab62fe0e535247abaec82034763a679d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b77b701ec241d9ab2ccfa6a15dbf26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.506344] env[61545]: DEBUG oslo_concurrency.lockutils [req-ef4f736e-11a5-42de-8e8b-8546fa690c8c req-b800d58c-7523-4cb8-823f-15e64343ee6e service nova] Releasing lock "refresh_cache-fa08b76f-d64d-46e9-9865-1ab2e9b1d823" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.689539] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Successfully updated port: 70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.720614] env[61545]: INFO nova.compute.manager [-] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Took 1.04 seconds to deallocate network for instance. [ 821.760421] env[61545]: DEBUG oslo_vmware.api [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Task: {'id': task-4255705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151201} completed successfully. 
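[editor's note] The nova.policy line above shows the "network:attach_external_network" check failing for a token that only carries the member and reader roles, so the port for instance 5b2fb040-... is requested without external-network privileges. Purely as an illustration, and assuming the rule effectively requires an admin role (this toy check is not oslo.policy's real evaluation engine):

    RULES = {'network:attach_external_network': {'admin'}}

    def check(rule, credentials):
        """Return True if any of the caller's roles satisfies the rule (toy check)."""
        required = RULES.get(rule, set())
        return bool(required & set(credentials.get('roles', [])))

    creds = {'roles': ['member', 'reader'],
             'project_id': '01b77b701ec241d9ab2ccfa6a15dbf26'}
    print(check('network:attach_external_network', creds))  # False -> check fails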
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.760904] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.761253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.762055] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.762393] env[61545]: INFO nova.compute.manager [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Took 1.17 seconds to destroy the instance on the hypervisor. [ 821.763305] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
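[editor's note] Both teardown paths above hand _deallocate_network_with_retries to an oslo.service looping call and block until it returns. A rough sketch of that retry wrapper (hypothetical names, plain time.sleep instead of eventlet/loopingcall):

    import time

    def call_with_retries(func, max_attempts=3, delay=1.0):
        """Keep invoking func until it succeeds or attempts are exhausted (sketch)."""
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except Exception as exc:  # the real code retries only specific errors
                if attempt == max_attempts:
                    raise
                print('deallocate attempt %d failed (%s), retrying' % (attempt, exc))
                time.sleep(delay)

    # Hypothetical usage: succeed on the second attempt.
    attempts = {'n': 0}
    def deallocate():
        attempts['n'] += 1
        if attempts['n'] < 2:
            raise RuntimeError('neutron temporarily unavailable')
        return 'deallocated'
    print(call_with_retries(deallocate, delay=0))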
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.765075] env[61545]: DEBUG nova.compute.manager [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 821.765075] env[61545]: DEBUG nova.network.neutron [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.768694] env[61545]: DEBUG nova.compute.manager [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Received event network-vif-plugged-70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 821.769041] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] Acquiring lock "d517f427-8580-481b-b50f-150da6c571b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.771135] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] Lock "d517f427-8580-481b-b50f-150da6c571b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.771135] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] Lock "d517f427-8580-481b-b50f-150da6c571b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.771135] env[61545]: DEBUG nova.compute.manager [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] No waiting events found dispatching network-vif-plugged-70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.771135] env[61545]: WARNING nova.compute.manager [req-4d89b8f6-3d9f-4dbf-a83e-abe8cc58abfe req-56b3fbc6-8fea-4b1e-b805-fa05495df4ba service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Received unexpected event network-vif-plugged-70ed8a3b-0e27-462d-84c6-05185727c589 for instance with vm_state building and task_state spawning. [ 821.853666] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 821.892929] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255701, 'name': CreateVM_Task, 'duration_secs': 0.681305} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.893346] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.897022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.897022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.897022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.899889] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-256c7a32-bfeb-4454-afd0-1bebd3c06288 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.923059] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255702, 'name': ReconfigVM_Task, 'duration_secs': 0.66518} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.926110] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Reconfigured VM instance instance-00000025 to attach disk [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/e21de424-8121-4e2f-84c2-8096ba8048cc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.927293] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 821.927293] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b7cbe0-bb00-a875-da79-0ae8b2fd102a" [ 821.927293] env[61545]: _type = "Task" [ 821.927293] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.927531] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fed139e4-cb8e-4fe8-ada1-1b64b16275f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.938429] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255703, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.949688] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 821.949688] env[61545]: value = "task-4255706" [ 821.949688] env[61545]: _type = "Task" [ 821.949688] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.950290] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b7cbe0-bb00-a875-da79-0ae8b2fd102a, 'name': SearchDatastore_Task, 'duration_secs': 0.009995} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.950703] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.951387] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.953956] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.954184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.954408] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 
tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.957747] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7c7e914-5bae-493d-8ff4-3b3a62882dbd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.971371] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255706, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.972704] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.972880] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.973637] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5489e1a6-45e9-4605-918f-f0788eb1321d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.983993] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 821.983993] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a348cf-8d96-4915-352f-aee00e1f5394" [ 821.983993] env[61545]: _type = "Task" [ 821.983993] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.992986] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a348cf-8d96-4915-352f-aee00e1f5394, 'name': SearchDatastore_Task, 'duration_secs': 0.009899} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.993787] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd21ed77-9725-4b6a-bc5d-647d67304f05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.002762] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 822.002762] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ce9d7-ce81-f03c-e600-d0718dd4fb66" [ 822.002762] env[61545]: _type = "Task" [ 822.002762] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.012665] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ce9d7-ce81-f03c-e600-d0718dd4fb66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.192696] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.192808] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.192945] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.214216] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Successfully created port: ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.229865] env[61545]: INFO nova.compute.manager [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance disappeared during terminate [ 822.230201] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f22cd2bb-e2fb-4feb-b66a-65ad224ccf3f tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "5a284df5-88ea-43bf-9944-ef344f99591c" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.670s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.417581] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255703, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.461653] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255706, 'name': Rename_Task, 'duration_secs': 0.175216} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.464551] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.465036] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b1f0f11-38c2-4587-8db4-c6a51205ebb6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.472604] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 822.472604] env[61545]: value = "task-4255707" [ 822.472604] env[61545]: _type = "Task" [ 822.472604] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.484478] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255707, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.516696] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ce9d7-ce81-f03c-e600-d0718dd4fb66, 'name': SearchDatastore_Task, 'duration_secs': 0.01147} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.517058] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.517443] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fa08b76f-d64d-46e9-9865-1ab2e9b1d823/fa08b76f-d64d-46e9-9865-1ab2e9b1d823.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.518113] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-728f3ea6-a5b1-4919-8820-230629f6e445 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.526725] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 822.526725] env[61545]: value = "task-4255708" [ 822.526725] env[61545]: _type = "Task" [ 822.526725] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.538011] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255708, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.604385] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe22b0c6-28d8-4944-8660-522708c19dd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.613306] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b15bcb-4f1d-4adc-b65d-c4668d2da76e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.648817] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542d08b2-072c-42e6-876e-13863b4515e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.658542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dff91b-16bc-46a5-a564-2fea6a9fd8e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.674431] env[61545]: DEBUG nova.compute.provider_tree [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.752960] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.780713] env[61545]: DEBUG nova.network.neutron [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.863501] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 822.895815] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 822.896104] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.896281] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.896486] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.896634] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.897052] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 822.897052] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 822.897225] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 822.900739] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 822.900739] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 822.900739] env[61545]: DEBUG nova.virt.hardware [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 822.900739] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8b6b02-1f01-4667-a0c5-309fde370e6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.912122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd30d7d-b663-4ccb-8799-a1dec56dbeaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.922775] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255703, 'name': CloneVM_Task, 'duration_secs': 1.600727} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.931443] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Created linked-clone VM from snapshot [ 822.932603] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be31bbc7-0be0-4c70-a593-d4772bff9d7c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.941911] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Uploading image a3d7a976-fe51-4642-b8ee-8733f35d1038 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 822.969613] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 822.969613] env[61545]: value = "vm-838652" [ 822.969613] env[61545]: _type = "VirtualMachine" [ 822.969613] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 822.970037] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-40ca9f26-8ea3-4359-9244-f460d8ccba90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.978896] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease: (returnval){ [ 822.978896] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5248a234-887a-ffe0-7fe6-8a0853988f91" [ 822.978896] env[61545]: _type = "HttpNfcLease" [ 822.978896] env[61545]: } obtained for exporting VM: (result){ [ 822.978896] env[61545]: value = "vm-838652" [ 822.978896] env[61545]: _type = "VirtualMachine" [ 822.978896] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 822.979297] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the lease: (returnval){ [ 822.979297] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5248a234-887a-ffe0-7fe6-8a0853988f91" [ 822.979297] env[61545]: _type = "HttpNfcLease" [ 822.979297] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 822.987273] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255707, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.991923] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 822.991923] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5248a234-887a-ffe0-7fe6-8a0853988f91" [ 822.991923] env[61545]: _type = "HttpNfcLease" [ 822.991923] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 823.010756] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Updating instance_info_cache with network_info: [{"id": "70ed8a3b-0e27-462d-84c6-05185727c589", "address": "fa:16:3e:8d:81:bf", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ed8a3b-0e", "ovs_interfaceid": "70ed8a3b-0e27-462d-84c6-05185727c589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.038722] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255708, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.180020] env[61545]: DEBUG nova.scheduler.client.report [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.284237] env[61545]: INFO nova.compute.manager [-] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Took 1.52 seconds to deallocate network for instance. [ 823.484924] env[61545]: DEBUG oslo_vmware.api [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255707, 'name': PowerOnVM_Task, 'duration_secs': 0.691766} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.488526] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 823.488772] env[61545]: INFO nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Took 8.97 seconds to spawn the instance on the hypervisor. [ 823.488961] env[61545]: DEBUG nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 823.489833] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9573b1-9b8d-4640-b285-23367cc6e7e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.497479] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 823.497479] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5248a234-887a-ffe0-7fe6-8a0853988f91" [ 823.497479] env[61545]: _type = "HttpNfcLease" [ 823.497479] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 823.499393] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 823.499393] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5248a234-887a-ffe0-7fe6-8a0853988f91" [ 823.499393] env[61545]: _type = "HttpNfcLease" [ 823.499393] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 823.503093] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09281c97-4d34-41fd-b2c8-afefc9e5cd7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.514061] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 823.514061] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 823.515372] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.515747] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Instance network_info: |[{"id": "70ed8a3b-0e27-462d-84c6-05185727c589", "address": "fa:16:3e:8d:81:bf", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ed8a3b-0e", "ovs_interfaceid": "70ed8a3b-0e27-462d-84c6-05185727c589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.516251] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:81:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13af9422-d668-4413-b63a-766558d83a3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70ed8a3b-0e27-462d-84c6-05185727c589', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.525220] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.526649] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.595438] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8869180b-0a35-41f8-8dda-329f1a057920 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.624076] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255708, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522792} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.625841] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fa08b76f-d64d-46e9-9865-1ab2e9b1d823/fa08b76f-d64d-46e9-9865-1ab2e9b1d823.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.626071] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.626354] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.626354] env[61545]: value = "task-4255710" [ 823.626354] env[61545]: _type = "Task" [ 823.626354] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.627096] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-753f2e8e-aded-4b98-9c65-0291f56e7af7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.637997] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255710, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.639545] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 823.639545] env[61545]: value = "task-4255711" [ 823.639545] env[61545]: _type = "Task" [ 823.639545] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.655021] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255711, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.687123] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.837s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.687123] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.689113] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.036s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.691827] env[61545]: INFO nova.compute.claims [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.711931] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-371d0070-fc3d-4e87-b53d-851bc1675e2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.790624] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.914957] env[61545]: DEBUG nova.compute.manager [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Received event network-changed-70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 823.914957] env[61545]: DEBUG nova.compute.manager [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Refreshing instance network info cache due to event network-changed-70ed8a3b-0e27-462d-84c6-05185727c589. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 823.915316] env[61545]: DEBUG oslo_concurrency.lockutils [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] Acquiring lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.915316] env[61545]: DEBUG oslo_concurrency.lockutils [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] Acquired lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.915516] env[61545]: DEBUG nova.network.neutron [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Refreshing network info cache for port 70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.014542] env[61545]: INFO nova.compute.manager [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Took 59.70 seconds to build instance. [ 824.140482] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255710, 'name': CreateVM_Task, 'duration_secs': 0.40898} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.147026] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.147026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.147026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.147026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 824.147828] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aee41d3-10f8-4f52-b333-1e8be9abeda1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.156735] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 
tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084227} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.160701] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Successfully updated port: ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 824.160701] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.161264] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2eee50-a8e0-48e1-8005-6e7bbb0f8fa7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.166305] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.166305] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5205e8a0-4834-7a11-d162-ada70ebc4077" [ 824.166305] env[61545]: _type = "Task" [ 824.166305] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.195152] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] fa08b76f-d64d-46e9-9865-1ab2e9b1d823/fa08b76f-d64d-46e9-9865-1ab2e9b1d823.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.198201] env[61545]: DEBUG nova.compute.utils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 824.200054] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-415c44c5-9ea2-4f98-baed-b6fd4f4942de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.221594] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.221925] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.226565] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5205e8a0-4834-7a11-d162-ada70ebc4077, 'name': SearchDatastore_Task, 'duration_secs': 0.010793} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.227723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.227962] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.228200] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.228348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.228558] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.229722] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b97166af-5c16-480f-bee0-00f10af48548 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.235056] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.235056] env[61545]: 
value = "task-4255712" [ 824.235056] env[61545]: _type = "Task" [ 824.235056] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.240891] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.241110] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.242345] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3e9af05-a80c-4cd2-8242-213aa7da577d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.249317] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255712, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.254243] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.254243] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279c7af-7d66-1f1e-b302-830eaf6f1768" [ 824.254243] env[61545]: _type = "Task" [ 824.254243] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.265478] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279c7af-7d66-1f1e-b302-830eaf6f1768, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.331495] env[61545]: DEBUG nova.policy [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c225f8da9ba3458a9cf7794b40ad215d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9fac909b8894a1d92f0a9bcd9739d15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.516793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca08b632-2825-4722-a695-cc1052cd5a37 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.546s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.582469] env[61545]: INFO nova.compute.manager [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Rescuing [ 824.582640] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.582910] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.583049] env[61545]: DEBUG nova.network.neutron [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.667766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.667766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
824.667766] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.721459] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.753355] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255712, 'name': ReconfigVM_Task, 'duration_secs': 0.479259} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.754029] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Reconfigured VM instance instance-00000026 to attach disk [datastore2] fa08b76f-d64d-46e9-9865-1ab2e9b1d823/fa08b76f-d64d-46e9-9865-1ab2e9b1d823.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.758182] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b831acc-8da1-4984-8cb5-c1250d6db8f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.785396] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.785396] env[61545]: value = "task-4255713" [ 824.785396] env[61545]: _type = "Task" [ 824.785396] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.785396] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5279c7af-7d66-1f1e-b302-830eaf6f1768, 'name': SearchDatastore_Task, 'duration_secs': 0.012057} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.791769] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01fd5193-c469-4fdb-80da-e81135795582 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.807764] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255713, 'name': Rename_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.809461] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.809461] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d60f76-7a9a-d5a0-c2cb-a8079870bfa3" [ 824.809461] env[61545]: _type = "Task" [ 824.809461] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.832101] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d60f76-7a9a-d5a0-c2cb-a8079870bfa3, 'name': SearchDatastore_Task, 'duration_secs': 0.012581} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.834135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.834135] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d517f427-8580-481b-b50f-150da6c571b9/d517f427-8580-481b-b50f-150da6c571b9.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 824.834309] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8692d21-9ae5-4b0b-b23f-e5c390565d19 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.843795] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 824.843795] env[61545]: value = "task-4255714" [ 824.843795] env[61545]: _type = "Task" [ 824.843795] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.859103] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.943858] env[61545]: DEBUG nova.network.neutron [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Updated VIF entry in instance network info cache for port 70ed8a3b-0e27-462d-84c6-05185727c589. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.944371] env[61545]: DEBUG nova.network.neutron [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Updating instance_info_cache with network_info: [{"id": "70ed8a3b-0e27-462d-84c6-05185727c589", "address": "fa:16:3e:8d:81:bf", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70ed8a3b-0e", "ovs_interfaceid": "70ed8a3b-0e27-462d-84c6-05185727c589", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.020373] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.122739] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Successfully created port: a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.292569] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.299735] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255713, 'name': Rename_Task, 'duration_secs': 0.194349} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.304267] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.304927] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34a2098e-3be7-4288-893f-bb7ab553e534 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.314517] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 825.314517] env[61545]: value = "task-4255715" [ 825.314517] env[61545]: _type = "Task" [ 825.314517] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.330404] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.362079] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255714, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.449077] env[61545]: DEBUG oslo_concurrency.lockutils [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] Releasing lock "refresh_cache-d517f427-8580-481b-b50f-150da6c571b9" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.450189] env[61545]: DEBUG nova.compute.manager [req-c7c0d0bd-9055-40f4-8594-0f5ac12177f7 req-2e4856ee-1b79-441d-a42b-a7322c314c5a service nova] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Received event network-vif-deleted-b4792d3c-8f80-4dde-bfbf-fb8ca94ce86e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 825.552343] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.574307] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdba54a-9546-411f-8b82-2d57906e5ac2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.583900] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd7c91d-37b7-4583-a2ff-5d21b6c99930 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.618834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b892fd51-712e-4d5d-baff-2212b5190ec5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.629792] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc9189b-81b1-4ec1-8a3e-6e927af81146 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.646173] env[61545]: DEBUG nova.compute.provider_tree [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.732157] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.760504] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.760875] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.761104] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.761365] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.761582] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.761776] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.762051] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.762328] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 825.762601] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.762840] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.763049] env[61545]: DEBUG nova.virt.hardware [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.764659] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d03ff2-ce50-41df-ad64-0cb1326a6b57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.773586] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1de357-2bad-4479-bde8-96560b810b5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.824232] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255715, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.856958] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567047} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.857491] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d517f427-8580-481b-b50f-150da6c571b9/d517f427-8580-481b-b50f-150da6c571b9.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.857825] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.858122] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdc03ecd-d67f-4d1f-9279-90eb6dc2c5ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.865822] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 825.865822] env[61545]: value = "task-4255716" [ 825.865822] env[61545]: _type = "Task" [ 825.865822] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.879765] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255716, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.150071] env[61545]: DEBUG nova.scheduler.client.report [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.175728] env[61545]: DEBUG nova.network.neutron [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Updating instance_info_cache with network_info: [{"id": "ca54cc50-d211-4ada-8e47-1747a860fa83", "address": "fa:16:3e:9a:a0:a9", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca54cc50-d2", "ovs_interfaceid": "ca54cc50-d211-4ada-8e47-1747a860fa83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.194282] env[61545]: DEBUG nova.network.neutron [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Updating instance_info_cache with network_info: [{"id": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "address": "fa:16:3e:3e:16:dc", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb1a376a-94", "ovs_interfaceid": "bb1a376a-94fe-4856-ab21-de3f98dda10e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.332246] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255715, 'name': PowerOnVM_Task, 'duration_secs': 0.759651} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.332246] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.332246] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Took 9.08 seconds to spawn the instance on the hypervisor. [ 826.332246] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.332246] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdcf9c2-6213-4ce0-bc8d-7dc5124400f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.378194] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08531} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.378542] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.379289] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f10337-a814-4e4c-8b49-78da0bf09430 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.405892] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] d517f427-8580-481b-b50f-150da6c571b9/d517f427-8580-481b-b50f-150da6c571b9.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.408478] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7010ec8b-a174-4998-bb23-0f2721d7ef91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.432177] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 826.432177] env[61545]: value = "task-4255717" [ 826.432177] env[61545]: _type = "Task" [ 826.432177] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.444374] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255717, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.496351] env[61545]: DEBUG nova.compute.manager [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Received event network-vif-plugged-ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 826.496501] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Acquiring lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.496737] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.496998] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.497107] env[61545]: DEBUG nova.compute.manager [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] No waiting events found dispatching network-vif-plugged-ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.497292] env[61545]: WARNING nova.compute.manager [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Received unexpected event network-vif-plugged-ca54cc50-d211-4ada-8e47-1747a860fa83 for instance with vm_state building and task_state spawning. [ 826.497515] env[61545]: DEBUG nova.compute.manager [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Received event network-changed-ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 826.497694] env[61545]: DEBUG nova.compute.manager [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Refreshing instance network info cache due to event network-changed-ca54cc50-d211-4ada-8e47-1747a860fa83. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 826.497875] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Acquiring lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.659100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.970s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.659100] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.663581] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.625s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.665365] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.667280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.926s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.668421] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.670723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.605s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.673426] env[61545]: INFO nova.compute.claims [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 
tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.681027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.681027] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Instance network_info: |[{"id": "ca54cc50-d211-4ada-8e47-1747a860fa83", "address": "fa:16:3e:9a:a0:a9", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca54cc50-d2", "ovs_interfaceid": "ca54cc50-d211-4ada-8e47-1747a860fa83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 826.681027] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Acquired lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.681027] env[61545]: DEBUG nova.network.neutron [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Refreshing network info cache for port ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.687124] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:a0:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13af9422-d668-4413-b63a-766558d83a3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca54cc50-d211-4ada-8e47-1747a860fa83', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 826.697934] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall 
[None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 826.699078] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 826.699192] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48ec295a-4890-40ad-9507-eb9a9d0f4cab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.721918] env[61545]: INFO nova.scheduler.client.report [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Deleted allocations for instance 8a3ac91d-8949-4745-9161-1a70899c0293 [ 826.726881] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-e21de424-8121-4e2f-84c2-8096ba8048cc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.731717] env[61545]: INFO nova.scheduler.client.report [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Deleted allocations for instance 2a0576f9-d740-4dfa-9783-17eb3987840b [ 826.740707] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 826.740707] env[61545]: value = "task-4255718" [ 826.740707] env[61545]: _type = "Task" [ 826.740707] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.753272] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255718, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.856625] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Took 57.63 seconds to build instance. [ 826.942958] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255717, 'name': ReconfigVM_Task, 'duration_secs': 0.329099} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.943047] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Reconfigured VM instance instance-00000027 to attach disk [datastore2] d517f427-8580-481b-b50f-150da6c571b9/d517f427-8580-481b-b50f-150da6c571b9.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.943765] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d80fda2-1dcf-4085-be75-f6e653908c55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.952245] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 826.952245] env[61545]: value = "task-4255719" [ 826.952245] env[61545]: _type = "Task" [ 826.952245] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.965181] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255719, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.180028] env[61545]: DEBUG nova.compute.utils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 827.184442] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.184659] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.242079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d2291fa0-367d-470f-99a8-06fb865cb2a7 tempest-ServersAdminNegativeTestJSON-1477357562 tempest-ServersAdminNegativeTestJSON-1477357562-project-member] Lock "8a3ac91d-8949-4745-9161-1a70899c0293" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.047s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.243263] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72d3e66b-d0a6-40d3-a5a4-1d16f2579f55 tempest-ServerMetadataTestJSON-447458016 tempest-ServerMetadataTestJSON-447458016-project-member] Lock "2a0576f9-d740-4dfa-9783-17eb3987840b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.246s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.266311] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255718, 'name': CreateVM_Task, 'duration_secs': 0.515219} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.269287] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 827.270240] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.270418] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.273503] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 827.273503] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c8a899-ce8c-4fc7-8b7d-560a91e9abaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.280118] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 
tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 827.280118] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e005f3-6312-0b13-b1b5-fb144d2ba100" [ 827.280118] env[61545]: _type = "Task" [ 827.280118] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.292861] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e005f3-6312-0b13-b1b5-fb144d2ba100, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.320267] env[61545]: DEBUG nova.policy [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e3c2a266409412ab551abbbb9a17312', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f50ef718e3a64a50b3c40ff4bc29e673', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.358458] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.542s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.468694] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255719, 'name': Rename_Task, 'duration_secs': 0.232906} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.469418] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.470224] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d783688-7c5c-4c00-ae23-3a1d29ff5cc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.480517] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 827.480517] env[61545]: value = "task-4255720" [ 827.480517] env[61545]: _type = "Task" [ 827.480517] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.493237] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255720, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.686552] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.801700] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e005f3-6312-0b13-b1b5-fb144d2ba100, 'name': SearchDatastore_Task, 'duration_secs': 0.013189} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.802082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.802325] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.802623] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.802718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.802877] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.810899] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-9625f344-27cf-4d58-862e-3c2bcd656d3c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.825020] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.825020] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.825020] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26f527f3-a9fb-4466-a011-05d2790bce68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.837313] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 827.837313] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521001aa-5800-7bfc-4b4b-962d4cb13b43" [ 827.837313] env[61545]: _type = "Task" [ 827.837313] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.851167] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521001aa-5800-7bfc-4b4b-962d4cb13b43, 'name': SearchDatastore_Task, 'duration_secs': 0.012501} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.851167] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f647f1-cf81-4cee-9d25-6d98d27006d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.856951] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 827.856951] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e2739a-a8b5-a88b-f7ce-de005aa9d08b" [ 827.856951] env[61545]: _type = "Task" [ 827.856951] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.871284] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.885645] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e2739a-a8b5-a88b-f7ce-de005aa9d08b, 'name': SearchDatastore_Task, 'duration_secs': 0.012806} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.886523] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.886523] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5b2fb040-a964-479f-ae3f-4f428248d64b/5b2fb040-a964-479f-ae3f-4f428248d64b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.886523] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5389d772-7b17-4c64-b282-914cfa3a674e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.901021] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 827.901021] env[61545]: value = "task-4255721" [ 827.901021] env[61545]: _type = "Task" [ 827.901021] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.909481] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255721, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.952328] env[61545]: DEBUG nova.network.neutron [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Updated VIF entry in instance network info cache for port ca54cc50-d211-4ada-8e47-1747a860fa83. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.952763] env[61545]: DEBUG nova.network.neutron [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Updating instance_info_cache with network_info: [{"id": "ca54cc50-d211-4ada-8e47-1747a860fa83", "address": "fa:16:3e:9a:a0:a9", "network": {"id": "071128a4-fced-4494-832d-354e630cfc4a", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1926647119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b77b701ec241d9ab2ccfa6a15dbf26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13af9422-d668-4413-b63a-766558d83a3b", "external-id": "nsx-vlan-transportzone-842", "segmentation_id": 842, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca54cc50-d2", "ovs_interfaceid": "ca54cc50-d211-4ada-8e47-1747a860fa83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.996489] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255720, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.278894] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.278894] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22466b9d-60bd-4de8-b17a-d60ac2d789da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.290364] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 828.290364] env[61545]: value = "task-4255722" [ 828.290364] env[61545]: _type = "Task" [ 828.290364] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.304422] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255722, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.358805] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Successfully updated port: a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.396939] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Successfully created port: 3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.403137] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.419083] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255721, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.458181] env[61545]: DEBUG oslo_concurrency.lockutils [req-12e6ce35-3b45-446d-a1af-90c5793e6095 req-e2c50191-b296-4cc0-a1ff-49ea6e46f201 service nova] Releasing lock "refresh_cache-5b2fb040-a964-479f-ae3f-4f428248d64b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.501490] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255720, 'name': PowerOnVM_Task, 'duration_secs': 0.562985} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.501786] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.501993] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Took 8.46 seconds to spawn the instance on the hypervisor. 
[ 828.502187] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.503042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1868331-8d38-4aaa-a0d7-153058308854 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.617150] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039df506-efa7-4832-93b9-636bdf26915c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.628415] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641d3319-4103-48b3-86a0-fc64337cae66 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.665237] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c9e41f-2216-4baa-becc-cc6c66261aa7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.677533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62a3d0c-a483-4e7a-980b-24604dd4e452 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.693638] env[61545]: DEBUG nova.compute.provider_tree [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.708482] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.744148] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.746285] env[61545]: DEBUG nova.virt.hardware [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.747145] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9dbb32-12e3-4622-9365-8bd3e7472b59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.757281] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d513f-3a4b-4c1a-8fa6-fe1f507c5bc6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.800795] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255722, 'name': PowerOffVM_Task, 'duration_secs': 0.360396} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.801669] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.802567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda399e9-b629-41a5-b3b4-456453a43fe1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.824950] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9dfd72-b7b2-4718-ba59-6d8aa89ae1c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.860486] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.861292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.861292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.861420] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.862714] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fb63721-7a68-4779-bd22-282408d828d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.873747] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 828.873747] env[61545]: value = "task-4255723" [ 828.873747] env[61545]: _type = "Task" [ 828.873747] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.891784] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 828.891967] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 828.892348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.892634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.892860] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 828.893166] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80ab2d1b-2945-4a22-9517-9065c4de5c6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.901482] env[61545]: DEBUG nova.compute.manager [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-vif-plugged-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 828.901742] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Acquiring lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.902241] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.902241] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.902446] env[61545]: DEBUG nova.compute.manager [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] No waiting events found dispatching network-vif-plugged-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.902485] env[61545]: WARNING nova.compute.manager [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received unexpected event network-vif-plugged-a08667b0-f29d-4bd2-8394-b73a26086238 for instance with vm_state building and task_state spawning. [ 828.902645] env[61545]: DEBUG nova.compute.manager [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 828.902858] env[61545]: DEBUG nova.compute.manager [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing instance network info cache due to event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 828.902948] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.912497] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.912497] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 828.912497] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa6597b8-7550-42a6-b899-392b2460c05a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.919856] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 828.919856] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521fde5e-eb2f-05b5-0932-a3050380cb76" [ 828.919856] env[61545]: _type = "Task" [ 828.919856] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.924900] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255721, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605611} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.928998] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5b2fb040-a964-479f-ae3f-4f428248d64b/5b2fb040-a964-479f-ae3f-4f428248d64b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.929280] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.929964] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f63636c-a55c-4036-8534-a63235310c95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.940273] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521fde5e-eb2f-05b5-0932-a3050380cb76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.942047] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 828.942047] env[61545]: value = "task-4255724" [ 828.942047] env[61545]: _type = "Task" [ 828.942047] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.954257] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255724, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.027948] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Took 53.74 seconds to build instance. [ 829.201860] env[61545]: DEBUG nova.scheduler.client.report [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.431033] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.446755] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521fde5e-eb2f-05b5-0932-a3050380cb76, 'name': SearchDatastore_Task, 'duration_secs': 0.02087} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.450742] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ebc4a1f-fb11-439e-80ae-1a92d78f5d49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.466430] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07738} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.468160] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.468695] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 829.468695] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c20d0b-edfb-d351-209a-da7b36af7711" [ 829.468695] env[61545]: _type = "Task" [ 829.468695] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.470027] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0428dd-5ec7-4d3c-84c0-f4183a69b9c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.484169] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c20d0b-edfb-d351-209a-da7b36af7711, 'name': SearchDatastore_Task, 'duration_secs': 0.013294} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.493603] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.493916] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. 
{{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 829.503955] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 5b2fb040-a964-479f-ae3f-4f428248d64b/5b2fb040-a964-479f-ae3f-4f428248d64b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.504522] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4551b3d-7754-4dc4-b06a-43bd377de1a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.506892] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28a97d5a-776e-4ac8-8361-db5dd16819ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.531140] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.660s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.534947] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 829.534947] env[61545]: value = "task-4255725" [ 829.534947] env[61545]: _type = "Task" [ 829.534947] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.538298] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 829.538298] env[61545]: value = "task-4255726" [ 829.538298] env[61545]: _type = "Task" [ 829.538298] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.555117] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.560375] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255726, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.675745] env[61545]: DEBUG nova.network.neutron [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.709621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.039s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.710172] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.713393] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.327s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.716482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.716482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.041s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.717948] env[61545]: INFO nova.compute.claims [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.774839] env[61545]: INFO nova.scheduler.client.report [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Deleted allocations for instance 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f [ 829.968124] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.968363] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.035419] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.053093] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.057835] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255725, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.180709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.180709] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Instance network_info: |[{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 830.180709] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.180709] env[61545]: DEBUG nova.network.neutron [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.182051] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c 
tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:10:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a08667b0-f29d-4bd2-8394-b73a26086238', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.192219] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Creating folder: Project (d9fac909b8894a1d92f0a9bcd9739d15). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.195458] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cf7337a-d1b2-4eed-b0ab-c11ac18abb1a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.209155] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Created folder: Project (d9fac909b8894a1d92f0a9bcd9739d15) in parent group-v838542. [ 830.210420] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Creating folder: Instances. Parent ref: group-v838655. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.210420] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df6365b5-afff-4634-ab4f-e504a1aff800 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.222839] env[61545]: DEBUG nova.compute.utils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.227882] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 830.228263] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.231048] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Created folder: Instances in parent group-v838655. 
[ 830.231401] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 830.232335] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.232891] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-588b6c35-f640-41d1-b132-77c21453929a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.259944] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.259944] env[61545]: value = "task-4255729" [ 830.259944] env[61545]: _type = "Task" [ 830.259944] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.271818] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255729, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.289629] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d0f0bbc-3a76-4d99-ac84-1a474c5402bf tempest-AttachInterfacesUnderV243Test-426069374 tempest-AttachInterfacesUnderV243Test-426069374-project-member] Lock "6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.381s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.423114] env[61545]: DEBUG nova.policy [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6b70f49819d45aaa5fa2b56cb8cd3e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29228f7495747ca97b16aa485960e14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.561913] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592705} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.562459] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. 
[ 830.563114] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52245b5d-60db-41fb-892c-59e0cd9a5d53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.569942] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255726, 'name': ReconfigVM_Task, 'duration_secs': 0.745532} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.573378] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 5b2fb040-a964-479f-ae3f-4f428248d64b/5b2fb040-a964-479f-ae3f-4f428248d64b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.573378] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b4b67eb-7e2c-4db0-a150-2917bfb3f15e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.608916] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.608916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.608916] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b00af5b6-e6c6-4843-9e90-31498057839d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.625181] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 830.625181] env[61545]: value = "task-4255730" [ 830.625181] env[61545]: _type = "Task" [ 830.625181] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.635572] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.635572] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.646205] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255730, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.646205] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 830.646205] env[61545]: value = "task-4255731" [ 830.646205] env[61545]: _type = "Task" [ 830.646205] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.661874] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255731, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.727904] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.782812] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255729, 'name': CreateVM_Task, 'duration_secs': 0.4733} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.785848] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.787282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.787282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.787484] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.789442] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffaff4a7-8a98-4868-b3f5-ce440c79ad50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.798023] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 830.798023] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc4c5a-7435-2233-98ce-8177cdbb2709" [ 830.798023] env[61545]: _type = "Task" [ 830.798023] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.806542] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc4c5a-7435-2233-98ce-8177cdbb2709, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.139535] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255730, 'name': Rename_Task, 'duration_secs': 0.230883} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.139826] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 831.140098] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11dd1a81-25cc-45b7-b113-8a0061cfe0f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.151703] env[61545]: DEBUG nova.network.neutron [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updated VIF entry in instance network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.152695] env[61545]: DEBUG nova.network.neutron [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.159575] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 831.159575] env[61545]: value = "task-4255732" [ 831.159575] env[61545]: _type = "Task" [ 831.159575] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.160360] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255731, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.174386] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255732, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.314890] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52dc4c5a-7435-2233-98ce-8177cdbb2709, 'name': SearchDatastore_Task, 'duration_secs': 0.011966} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.315255] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.315522] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.315884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.316092] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.317126] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.317126] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4dd86488-2686-4bf0-ac58-21feaf602a1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.335440] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.335440] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.335440] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd604e77-b799-4cd3-ae18-6acfedf47a34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.342287] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 831.342287] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ef64d-af0c-cdb4-c10a-9c6b26b08253" [ 831.342287] env[61545]: _type = "Task" [ 831.342287] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.356485] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ef64d-af0c-cdb4-c10a-9c6b26b08253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.394655] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Successfully updated port: 3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.500067] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a16d7ca-1d90-478e-b9d6-2c30695a6aef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.509354] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93dabc25-ecf2-413b-be6e-d13c5de4be55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.542295] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f56d717-9267-40bd-85c8-5135b26453ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.552515] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7416ac71-cefb-402d-b492-0e14dc7485ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.570090] env[61545]: DEBUG nova.compute.provider_tree [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 831.657039] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255731, 'name': ReconfigVM_Task, 'duration_secs': 0.724408} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.657369] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Reconfigured VM instance instance-00000025 to attach disk [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.658629] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6355a4ce-1eb7-4826-8270-386832859990 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.664532] env[61545]: DEBUG oslo_concurrency.lockutils [req-97accdfd-1f31-4d25-8bf7-5e972a943c77 req-c35c851d-05d7-48a8-9d5a-d336ae63177c service nova] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.687915] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c2e40f5-b3b1-4adb-a621-00775a208ea7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.704830] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255732, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.706229] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 831.706229] env[61545]: value = "task-4255733" [ 831.706229] env[61545]: _type = "Task" [ 831.706229] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.714945] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255733, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.746880] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.782944] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.783233] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.783390] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.783573] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.783718] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.783892] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.784179] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 831.784370] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.784600] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.784861] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.785116] env[61545]: DEBUG nova.virt.hardware [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.786046] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e84b76-260e-4d10-9a30-dc7a05a11e08 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.797139] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736ccff7-2249-4156-bf6f-1a92f341c945 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.803229] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Successfully created port: 16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.856925] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ef64d-af0c-cdb4-c10a-9c6b26b08253, 'name': SearchDatastore_Task, 'duration_secs': 0.020403} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.860162] env[61545]: DEBUG nova.compute.manager [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Received event network-vif-plugged-3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 831.860572] env[61545]: DEBUG oslo_concurrency.lockutils [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] Acquiring lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.860747] env[61545]: DEBUG oslo_concurrency.lockutils [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.860920] env[61545]: DEBUG oslo_concurrency.lockutils [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.861102] env[61545]: DEBUG nova.compute.manager [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] No waiting events found dispatching network-vif-plugged-3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.861274] env[61545]: WARNING nova.compute.manager [req-2266f961-8251-4b7b-90f6-9ec1958bb62c req-3d487ff6-f180-48ff-8304-62b37f68d618 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Received unexpected event network-vif-plugged-3f0de1dd-63b8-4054-827e-0daae86eaaa5 for instance with vm_state building and task_state spawning. [ 831.861602] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb0e524-afab-4aad-a98c-e4ec2f8d8f3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.868955] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 831.868955] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f90b97-d065-35ac-2411-2d77f8aa02c9" [ 831.868955] env[61545]: _type = "Task" [ 831.868955] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.883875] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f90b97-d065-35ac-2411-2d77f8aa02c9, 'name': SearchDatastore_Task, 'duration_secs': 0.011832} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.884183] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.884449] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/609ba431-b42b-4b0d-9c16-06e19bee114c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.885393] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b15e5926-e603-4809-b378-8e673838e86b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.894699] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 831.894699] env[61545]: value = "task-4255734" [ 831.894699] env[61545]: _type = "Task" [ 831.894699] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.898777] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.898928] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquired lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.899097] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.905962] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.097529] env[61545]: ERROR nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [req-8e01ff75-ec24-4288-a49e-7b4f72583a2f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8e01ff75-ec24-4288-a49e-7b4f72583a2f"}]} [ 832.134482] env[61545]: DEBUG nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 832.148442] env[61545]: DEBUG nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 832.148835] env[61545]: DEBUG nova.compute.provider_tree [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.174174] env[61545]: DEBUG nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 832.179600] env[61545]: DEBUG oslo_vmware.api [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255732, 'name': PowerOnVM_Task, 'duration_secs': 0.887378} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.179942] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 832.180171] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Took 9.32 seconds to spawn the instance on the hypervisor. [ 832.180356] env[61545]: DEBUG nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.181290] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb34b21d-e9b6-4da5-b9e7-fa3bbcb7c3ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.215173] env[61545]: DEBUG nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 832.231688] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255733, 'name': ReconfigVM_Task, 'duration_secs': 0.275622} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.232311] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.232856] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb9eec74-ac00-43dc-ae49-1d36c61d6a18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.245367] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 832.245367] env[61545]: value = "task-4255735" [ 832.245367] env[61545]: _type = "Task" [ 832.245367] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.257866] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.410817] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255734, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.463505] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.707416] env[61545]: INFO nova.compute.manager [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Took 55.33 seconds to build instance. [ 832.757722] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255735, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.790609] env[61545]: DEBUG nova.network.neutron [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updating instance_info_cache with network_info: [{"id": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "address": "fa:16:3e:a4:0a:73", "network": {"id": "07247a37-d94b-40cb-a352-293d3390a79f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-512329380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ef718e3a64a50b3c40ff4bc29e673", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0de1dd-63", "ovs_interfaceid": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.913519] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627656} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.917332] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/609ba431-b42b-4b0d-9c16-06e19bee114c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.917332] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.917332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0eec6abb-beae-4ba1-8410-8c43e4c50d9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.930742] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 832.930742] env[61545]: value = "task-4255736" [ 832.930742] env[61545]: _type = "Task" [ 832.930742] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.944766] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.991061] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60245b88-b83f-4e16-b325-1ab8963d8a73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.004878] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d71957-9496-46bf-ae7c-37f114892fff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.039201] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8f9b09-65bb-4757-8616-7dbbf4bbce68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.049163] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf1a403-55b7-408e-84ff-a61126dacf33 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.066690] env[61545]: DEBUG nova.compute.provider_tree [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.207319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b622ba01-6bf1-4e83-8799-56669730b673 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.300s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.259092] env[61545]: DEBUG oslo_vmware.api [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255735, 'name': PowerOnVM_Task, 'duration_secs': 0.761095} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.259425] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.262289] env[61545]: DEBUG nova.compute.manager [None req-6b0b05ce-fb7c-4337-a4ce-a36f2a556aa5 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.263213] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e7a752-5070-4cda-9dac-0053a0ebc2a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.296783] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Releasing lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.296783] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Instance network_info: |[{"id": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "address": "fa:16:3e:a4:0a:73", "network": {"id": "07247a37-d94b-40cb-a352-293d3390a79f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-512329380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ef718e3a64a50b3c40ff4bc29e673", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0de1dd-63", "ovs_interfaceid": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.296783] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:0a:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ee018eb-75be-4037-a80a-07034d4eae35', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3f0de1dd-63b8-4054-827e-0daae86eaaa5', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.306849] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Creating folder: Project (f50ef718e3a64a50b3c40ff4bc29e673). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.307273] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f0335d5-c1b0-4531-9e04-16a3673b868e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.320911] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Created folder: Project (f50ef718e3a64a50b3c40ff4bc29e673) in parent group-v838542. [ 833.320999] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Creating folder: Instances. Parent ref: group-v838658. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.321288] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3cc0a0c4-c8a3-4433-a0a1-b96c2b6a1840 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.334963] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Created folder: Instances in parent group-v838658. [ 833.335242] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.335698] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.335698] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0c41f36-beb2-4267-9d10-932fc4073fad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.356998] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.356998] env[61545]: value = "task-4255739" [ 833.356998] env[61545]: _type = "Task" [ 833.356998] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.366653] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255739, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.452040] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094629} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.452040] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.452040] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbda690-b36f-43f4-96a0-b29a705148c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.481209] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/609ba431-b42b-4b0d-9c16-06e19bee114c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.482357] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a7b2761-a1d8-419e-bf23-738e9d4c5bba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.507030] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 833.507030] env[61545]: value = "task-4255740" [ 833.507030] env[61545]: _type = "Task" [ 833.507030] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.517549] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255740, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.619022] env[61545]: DEBUG nova.scheduler.client.report [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 833.619022] env[61545]: DEBUG nova.compute.provider_tree [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 75 to 76 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 833.619022] env[61545]: DEBUG nova.compute.provider_tree [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.713037] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 833.871142] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255739, 'name': CreateVM_Task, 'duration_secs': 0.507338} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.871312] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.872367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.872535] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.872915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.873245] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02349ea0-4279-41fc-917d-e44f90be9d9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.885027] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 833.885027] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529eda92-04db-8fbf-e489-0ec766715c61" [ 833.885027] env[61545]: _type = "Task" [ 833.885027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.893753] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529eda92-04db-8fbf-e489-0ec766715c61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.909676] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.909822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.910019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.910207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.910378] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.912737] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Successfully updated port: 16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.914263] env[61545]: INFO nova.compute.manager [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Terminating instance [ 834.020021] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255740, 'name': ReconfigVM_Task, 'duration_secs': 0.464198} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.020396] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/609ba431-b42b-4b0d-9c16-06e19bee114c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.021190] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1d29eff-7964-42d4-90ed-2d66db1e5f62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.030199] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 834.030199] env[61545]: value = "task-4255741" [ 834.030199] env[61545]: _type = "Task" [ 834.030199] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.040444] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255741, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.122968] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.407s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.123525] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 834.126977] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.946s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.127283] env[61545]: DEBUG nova.objects.instance [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lazy-loading 'resources' on Instance uuid b2579785-d1a4-48da-ba27-6ee3098578f1 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.240106] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.320538] env[61545]: DEBUG nova.compute.manager [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Received event network-changed-3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 834.320762] env[61545]: DEBUG nova.compute.manager [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Refreshing instance network info cache due to event network-changed-3f0de1dd-63b8-4054-827e-0daae86eaaa5. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 834.320990] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Acquiring lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.321152] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Acquired lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.321319] env[61545]: DEBUG nova.network.neutron [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Refreshing network info cache for port 3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.404095] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529eda92-04db-8fbf-e489-0ec766715c61, 'name': SearchDatastore_Task, 'duration_secs': 0.013246} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.404095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.404095] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.404095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.404095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.404095] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.404095] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2016800b-1834-4c7b-be6d-d318e87cdf98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.416328] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.416328] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.416454] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.416725] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.416725] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.423845] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90a57ece-ed1e-4cad-b781-a213e885efe4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.428777] env[61545]: DEBUG nova.compute.manager [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.431215] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.431215] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f37091e-f3b5-4b5f-95ca-577e42b2653f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.439937] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 834.439937] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c4f7c1-3ea2-f4d3-d6a9-e038986335ce" [ 834.439937] env[61545]: _type = "Task" [ 834.439937] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.443971] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.448217] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45f5e744-b055-4e90-a6a6-2f56f669bda6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.461590] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c4f7c1-3ea2-f4d3-d6a9-e038986335ce, 'name': SearchDatastore_Task, 'duration_secs': 0.017158} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.463737] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 834.463737] env[61545]: value = "task-4255742" [ 834.463737] env[61545]: _type = "Task" [ 834.463737] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.463737] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e935bcf9-c8da-4c6c-bd16-dcc88bbd965b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.475429] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 834.475429] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e34ebd-8a64-2402-14ef-7d6a6c9bc726" [ 834.475429] env[61545]: _type = "Task" [ 834.475429] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.479780] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.490431] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e34ebd-8a64-2402-14ef-7d6a6c9bc726, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.542064] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255741, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.630307] env[61545]: DEBUG nova.compute.utils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 834.631952] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 834.706107] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 834.707606] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f85177c-22c9-46fb-a8e4-66c9a051397e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.717808] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 834.718109] env[61545]: ERROR oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk due to incomplete transfer. [ 834.718677] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f47c6ada-be08-4360-995f-7346004ce3ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.728883] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5290349d-6ff7-533a-777d-b83a39d59348/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 834.729235] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Uploaded image a3d7a976-fe51-4642-b8ee-8733f35d1038 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 834.732585] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 834.737087] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a6558f8c-12b1-4df3-a111-f3c3c9253659 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.746712] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 834.746712] env[61545]: value = "task-4255743" [ 834.746712] env[61545]: _type = "Task" [ 834.746712] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.756914] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255743, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.979039] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.996523] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e34ebd-8a64-2402-14ef-7d6a6c9bc726, 'name': SearchDatastore_Task, 'duration_secs': 0.014831} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.996657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.996923] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9cf6dd9e-40e9-4df6-9342-2850e0f93d85/9cf6dd9e-40e9-4df6-9342-2850e0f93d85.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.997239] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6caa4cf-99b4-44ac-a285-281bf45d5ccf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.006266] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 835.006266] env[61545]: value = "task-4255744" [ 835.006266] env[61545]: _type = "Task" [ 835.006266] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.011049] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.024154] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255744, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.047664] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255741, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.140043] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.260965] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255743, 'name': Destroy_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.414858] env[61545]: DEBUG nova.network.neutron [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updating instance_info_cache with network_info: [{"id": "16667e69-57e6-426e-8b7e-0da6159f84bb", "address": "fa:16:3e:10:6f:d5", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16667e69-57", "ovs_interfaceid": "16667e69-57e6-426e-8b7e-0da6159f84bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.429459] env[61545]: DEBUG nova.network.neutron [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updated VIF entry in instance network info cache for port 3f0de1dd-63b8-4054-827e-0daae86eaaa5. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.429459] env[61545]: DEBUG nova.network.neutron [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updating instance_info_cache with network_info: [{"id": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "address": "fa:16:3e:a4:0a:73", "network": {"id": "07247a37-d94b-40cb-a352-293d3390a79f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-512329380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ef718e3a64a50b3c40ff4bc29e673", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0de1dd-63", "ovs_interfaceid": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.475703] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064f1b82-3e8d-4fad-aa15-751618346eb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.484335] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255742, 'name': PowerOffVM_Task, 'duration_secs': 0.610447} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.484335] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.484335] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.484670] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45cb99cd-4a79-447e-8033-8452e4c08426 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.490407] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0c28d9-779c-4ae0-92d2-9c205e419830 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.529664] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8ffea2-1a26-459a-9142-27c5a5b12b92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.544578] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255744, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.546673] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510f73a9-a45d-48cb-be4f-3970038f618e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.554848] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255741, 'name': Rename_Task, 'duration_secs': 1.050185} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.555687] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.556024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62745e78-6ded-4850-b45c-e83824763aba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.567111] env[61545]: DEBUG nova.compute.provider_tree [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.572336] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.572336] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.572336] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleting the datastore file [datastore2] fa08b76f-d64d-46e9-9865-1ab2e9b1d823 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.572336] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cf1462f-51f6-4a82-91b7-6d05f98a7058 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.575787] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 835.575787] env[61545]: value = "task-4255746" [ 835.575787] env[61545]: _type = "Task" [ 835.575787] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.581053] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 835.581053] env[61545]: value = "task-4255747" [ 835.581053] env[61545]: _type = "Task" [ 835.581053] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.592396] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.596345] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255747, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.758098] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255743, 'name': Destroy_Task, 'duration_secs': 0.617112} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.758569] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Destroyed the VM [ 835.758868] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 835.759178] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0269ef93-9c76-400a-a775-c22eb1dea7f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.767065] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 835.767065] env[61545]: value = "task-4255748" [ 835.767065] env[61545]: _type = "Task" [ 835.767065] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.777705] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255748, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.920149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.920149] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Instance network_info: |[{"id": "16667e69-57e6-426e-8b7e-0da6159f84bb", "address": "fa:16:3e:10:6f:d5", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16667e69-57", "ovs_interfaceid": "16667e69-57e6-426e-8b7e-0da6159f84bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 835.920149] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:6f:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16667e69-57e6-426e-8b7e-0da6159f84bb', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.929924] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Creating folder: Project (f29228f7495747ca97b16aa485960e14). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.930381] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d6b7034-e6e4-442a-bcb8-9a0f1af2a224 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.933432] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Releasing lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.933764] env[61545]: DEBUG nova.compute.manager [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received event network-vif-plugged-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 835.934022] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Acquiring lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.934316] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.934546] env[61545]: DEBUG oslo_concurrency.lockutils [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.934762] env[61545]: DEBUG nova.compute.manager [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] No waiting events found dispatching network-vif-plugged-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 835.934973] env[61545]: WARNING nova.compute.manager [req-e43e36c0-cab3-4e5b-813d-e99e769657d4 req-1b4fc16e-93cc-4f1e-ae85-43cd98e53a87 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received unexpected event network-vif-plugged-16667e69-57e6-426e-8b7e-0da6159f84bb for instance with vm_state building and task_state spawning. [ 835.945200] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Created folder: Project (f29228f7495747ca97b16aa485960e14) in parent group-v838542. [ 835.945415] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Creating folder: Instances. Parent ref: group-v838661. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.945902] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f59c74d6-0889-4688-82c2-57f10cad37b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.961956] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Created folder: Instances in parent group-v838661. [ 835.961956] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.961956] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 835.961956] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3ea6b5c-fe12-441b-8c2b-75ae091fa1cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.990976] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.990976] env[61545]: value = "task-4255751" [ 835.990976] env[61545]: _type = "Task" [ 835.990976] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.002623] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255751, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.035336] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255744, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567566} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.035720] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9cf6dd9e-40e9-4df6-9342-2850e0f93d85/9cf6dd9e-40e9-4df6-9342-2850e0f93d85.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.035854] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.036186] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a80a058-792a-4464-9a86-d5f5761444da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.045615] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 836.045615] env[61545]: value = "task-4255752" [ 836.045615] env[61545]: _type = "Task" [ 836.045615] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.058046] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255752, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.073670] env[61545]: DEBUG nova.scheduler.client.report [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.097060] env[61545]: DEBUG oslo_vmware.api [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.382355} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.101903] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.102099] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.102314] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.102527] env[61545]: INFO nova.compute.manager [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Took 1.67 seconds to destroy the instance on the hypervisor. [ 836.102822] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.103564] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255746, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.103839] env[61545]: DEBUG nova.compute.manager [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.103967] env[61545]: DEBUG nova.network.neutron [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.151189] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 836.179163] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.179538] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.179746] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.180018] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.180303] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.180496] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.180750] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.180948] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.181172] env[61545]: DEBUG nova.virt.hardware [None 
req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.181389] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.181614] env[61545]: DEBUG nova.virt.hardware [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.182670] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7825d84b-91c6-4e72-8f95-b79640950169 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.192590] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a73521-fd75-4d61-910e-ff328871aa95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.212012] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.218077] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Creating folder: Project (2430d37bfaae467da3903baeb2ad99b0). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.218853] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba1a6871-9209-4976-9b72-53879d554cc5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.231578] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Created folder: Project (2430d37bfaae467da3903baeb2ad99b0) in parent group-v838542. [ 836.231803] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Creating folder: Instances. Parent ref: group-v838664. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 836.232122] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa928a7e-e8d0-4e15-865a-518729f2a418 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.246040] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Created folder: Instances in parent group-v838664. [ 836.246502] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.246725] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.247431] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87fa385c-067e-46e2-98bd-1cf95412c669 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.273845] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.273845] env[61545]: value = "task-4255755" [ 836.273845] env[61545]: _type = "Task" [ 836.273845] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.287452] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255748, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.295180] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255755, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.485307] env[61545]: INFO nova.compute.manager [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Rescuing [ 836.486043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.486043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.486131] env[61545]: DEBUG nova.network.neutron [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.504442] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255751, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.557689] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255752, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10179} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.557689] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.557884] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0859c2c7-dc51-40b1-977b-00e380555ff7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.583452] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 9cf6dd9e-40e9-4df6-9342-2850e0f93d85/9cf6dd9e-40e9-4df6-9342-2850e0f93d85.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.584916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.458s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.587448] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c6d07dd-7f36-4922-bb56-c6b6ec447481 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.604522] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.348s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.606851] env[61545]: INFO nova.compute.claims [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.631417] env[61545]: DEBUG oslo_vmware.api [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255746, 'name': PowerOnVM_Task, 'duration_secs': 0.963878} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.633900] env[61545]: INFO nova.scheduler.client.report [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Deleted allocations for instance b2579785-d1a4-48da-ba27-6ee3098578f1 [ 836.636174] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.636436] env[61545]: INFO nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Took 10.90 seconds to spawn the instance on the hypervisor. [ 836.636639] env[61545]: DEBUG nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.637072] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 836.637072] env[61545]: value = "task-4255756" [ 836.637072] env[61545]: _type = "Task" [ 836.637072] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.642834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891c2586-1b31-47be-a469-6edd52bd7a8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.662431] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.699525] env[61545]: DEBUG nova.compute.manager [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 836.699611] env[61545]: DEBUG nova.compute.manager [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing instance network info cache due to event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 836.699761] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] Acquiring lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.699907] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] Acquired lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.700352] env[61545]: DEBUG nova.network.neutron [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.778948] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255748, 'name': RemoveSnapshot_Task} progress is 31%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.789312] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255755, 'name': CreateVM_Task, 'duration_secs': 0.478376} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.789491] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.789933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.790169] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.790432] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 836.790691] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dd542ac-d006-418d-ae56-f863809dff9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.795839] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 
tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 836.795839] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522ed773-569b-00ac-e8bf-59b36052c3cd" [ 836.795839] env[61545]: _type = "Task" [ 836.795839] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.807717] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522ed773-569b-00ac-e8bf-59b36052c3cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.003204] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255751, 'name': CreateVM_Task, 'duration_secs': 0.560375} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.003680] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.004189] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.144806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-52efc35f-c173-4803-9921-887c11a64e36 tempest-ServersTestFqdnHostnames-1717368552 tempest-ServersTestFqdnHostnames-1717368552-project-member] Lock "b2579785-d1a4-48da-ba27-6ee3098578f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.974s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.157450] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255756, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.168466] env[61545]: INFO nova.compute.manager [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Took 53.41 seconds to build instance. [ 837.175133] env[61545]: DEBUG nova.network.neutron [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.292526] env[61545]: DEBUG oslo_vmware.api [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255748, 'name': RemoveSnapshot_Task, 'duration_secs': 1.410954} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.293742] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 837.297702] env[61545]: INFO nova.compute.manager [None req-2048fee4-e3e7-4c1f-86cc-205b29c6e5f5 tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Took 18.01 seconds to snapshot the instance on the hypervisor. [ 837.321141] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522ed773-569b-00ac-e8bf-59b36052c3cd, 'name': SearchDatastore_Task, 'duration_secs': 0.042317} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.321141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.321141] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.321141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.321141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.321141] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.321787] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.322164] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 837.322794] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c077f67a-e4ba-4822-b09f-243a158f697b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.329514] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39bb2353-f9be-42d8-8056-396c35a9aeb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.340457] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 837.340457] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c8d1a-75e0-e42c-1d6d-cd64d895f3d9" [ 837.340457] env[61545]: _type = "Task" [ 837.340457] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.341731] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.341859] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.346526] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3116f2-6f5c-482a-be99-e31118970b49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.355231] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c8d1a-75e0-e42c-1d6d-cd64d895f3d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.356973] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 837.356973] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c55c2-6637-007f-4eb0-a9fa03291512" [ 837.356973] env[61545]: _type = "Task" [ 837.356973] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.367229] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c55c2-6637-007f-4eb0-a9fa03291512, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.459087] env[61545]: DEBUG nova.network.neutron [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [{"id": "ab535fb5-b111-46f9-8c40-e9647f50901b", "address": "fa:16:3e:f5:f9:73", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab535fb5-b1", "ovs_interfaceid": "ab535fb5-b111-46f9-8c40-e9647f50901b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.664506] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255756, 'name': ReconfigVM_Task, 'duration_secs': 0.789249} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.664664] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 9cf6dd9e-40e9-4df6-9342-2850e0f93d85/9cf6dd9e-40e9-4df6-9342-2850e0f93d85.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.665227] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05f95787-caa3-414a-931f-b890d4d02484 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.672919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb857bce-6591-4553-8eba-39f69b9afc2c tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.877s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.672919] env[61545]: DEBUG nova.network.neutron [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updated VIF entry in instance network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.672919] env[61545]: DEBUG nova.network.neutron [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updating instance_info_cache with network_info: [{"id": "16667e69-57e6-426e-8b7e-0da6159f84bb", "address": "fa:16:3e:10:6f:d5", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16667e69-57", "ovs_interfaceid": "16667e69-57e6-426e-8b7e-0da6159f84bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.676452] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 837.676452] 
env[61545]: value = "task-4255757" [ 837.676452] env[61545]: _type = "Task" [ 837.676452] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.680534] env[61545]: INFO nova.compute.manager [-] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Took 1.58 seconds to deallocate network for instance. [ 837.692261] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255757, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.869433] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c8d1a-75e0-e42c-1d6d-cd64d895f3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.020289} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.870497] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.870793] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.871023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.879165] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c55c2-6637-007f-4eb0-a9fa03291512, 'name': SearchDatastore_Task, 'duration_secs': 0.028574} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.879628] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1892100f-bfd7-4b3d-a05a-b88e827f3525 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.886710] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 837.886710] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529a40bf-8bc9-9ee5-af31-94b1e69f40f5" [ 837.886710] env[61545]: _type = "Task" [ 837.886710] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.901338] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529a40bf-8bc9-9ee5-af31-94b1e69f40f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.961736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.177581] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb839a2e-0496-4ffe-8630-8618b9c7d47c req-7af1dc3e-e93b-4f00-8a4b-3543fa921be1 service nova] Releasing lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.178122] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.189599] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.193895] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255757, 'name': Rename_Task, 'duration_secs': 0.409838} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.197023] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.197023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e52b931a-bc9d-454c-a600-7c61ad6776f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.202437] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 838.202437] env[61545]: value = "task-4255758" [ 838.202437] env[61545]: _type = "Task" [ 838.202437] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.217956] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.279551] env[61545]: INFO nova.compute.manager [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Rescuing [ 838.279824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.279973] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.280170] env[61545]: DEBUG nova.network.neutron [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.325737] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf6b73c-7d7d-4a9e-ad93-3d7bd5182764 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.338159] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a64c85-adf7-4606-8199-328eb5fb0769 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.373770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3d6a12-80ea-4a98-8d20-a8c75b94f388 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.381517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c160b11-40b3-4581-9941-69df260e5a38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.397908] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529a40bf-8bc9-9ee5-af31-94b1e69f40f5, 'name': SearchDatastore_Task, 'duration_secs': 0.018331} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.407732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.408372] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6f2a4514-4de9-427d-91be-f445235696bf/6f2a4514-4de9-427d-91be-f445235696bf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.408583] env[61545]: DEBUG nova.compute.provider_tree [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.411626] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.411626] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.411626] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-598ee817-bc63-4825-b2d9-efa1584512e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.413196] env[61545]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-8f8aad58-994b-447c-883e-9ed7f2f35a62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.422174] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 838.422174] env[61545]: value = "task-4255759" [ 838.422174] env[61545]: _type = "Task" [ 838.422174] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.427628] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.427918] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.429147] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e083aa6c-3658-4535-a3ae-3515288c7ad6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.435752] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255759, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.440203] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 838.440203] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c5237-0802-18e0-75e6-dd3d12eccfe8" [ 838.440203] env[61545]: _type = "Task" [ 838.440203] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.453042] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c5237-0802-18e0-75e6-dd3d12eccfe8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.712793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.717442] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255758, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.915138] env[61545]: DEBUG nova.scheduler.client.report [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.935134] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255759, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.951992] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c5237-0802-18e0-75e6-dd3d12eccfe8, 'name': SearchDatastore_Task, 'duration_secs': 0.013455} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.952903] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-551c3422-9a02-4047-a7cd-6ad8837cca37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.961677] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 838.961677] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b95b48-6360-bca7-e552-ebe408337fe3" [ 838.961677] env[61545]: _type = "Task" [ 838.961677] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.973669] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b95b48-6360-bca7-e552-ebe408337fe3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.993465] env[61545]: DEBUG nova.compute.manager [req-7ded5b69-6194-4098-a2f0-3a4f79ca2e85 req-55deba59-7b98-40c0-badb-91861781c41c service nova] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Received event network-vif-deleted-5d92ef0a-2647-43af-b441-58b6137c730e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 839.187455] env[61545]: DEBUG nova.network.neutron [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.221415] env[61545]: DEBUG oslo_vmware.api [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255758, 'name': PowerOnVM_Task, 'duration_secs': 0.687644} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.221775] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.224019] env[61545]: INFO nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Took 10.51 seconds to spawn the instance on the hypervisor. 
[ 839.224019] env[61545]: DEBUG nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.224019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4bf44a-9f90-4315-adda-8c5b15ea6fc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.422351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.424313] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 839.427203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.073s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.427694] env[61545]: INFO nova.compute.claims [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.442566] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255759, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657719} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.442967] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6f2a4514-4de9-427d-91be-f445235696bf/6f2a4514-4de9-427d-91be-f445235696bf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.443118] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.443408] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d94983f6-c015-4870-b8f2-da65b37b8b8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.453188] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 839.453188] env[61545]: value = "task-4255760" [ 839.453188] env[61545]: _type = "Task" [ 839.453188] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.464371] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.478747] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b95b48-6360-bca7-e552-ebe408337fe3, 'name': SearchDatastore_Task, 'duration_secs': 0.049523} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.478747] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.478747] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ced5bde7-07b9-4d07-8b13-49f6fb006eed/ced5bde7-07b9-4d07-8b13-49f6fb006eed.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.479063] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6a02802-db0a-4139-8c91-08280052308d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.489257] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 839.489257] env[61545]: value = "task-4255761" [ 839.489257] env[61545]: _type = "Task" [ 839.489257] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.504685] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255761, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.510240] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.512875] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e77fab8e-f017-4ebd-b919-93d85b858da0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.520953] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 839.520953] env[61545]: value = "task-4255762" [ 839.520953] env[61545]: _type = "Task" [ 839.520953] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.540803] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255762, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.691659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.748492] env[61545]: INFO nova.compute.manager [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Took 54.12 seconds to build instance. [ 839.934253] env[61545]: DEBUG nova.compute.utils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 839.936736] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 839.936736] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.968918] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120493} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.969258] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.970519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf575b4-1ca0-4b1f-8718-3876f4065d32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.002727] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 6f2a4514-4de9-427d-91be-f445235696bf/6f2a4514-4de9-427d-91be-f445235696bf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.006517] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04655c1b-3b7f-4378-a46a-e13a8c94fb5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.025239] env[61545]: DEBUG nova.policy [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113fc58985704b0b9e0a28be2f61cd68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9007a6e389c0467c8e2077309984eaab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 840.034771] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255761, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.036833] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 840.036833] env[61545]: value = "task-4255763" [ 840.036833] env[61545]: _type = "Task" [ 840.036833] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.043848] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255762, 'name': PowerOffVM_Task, 'duration_secs': 0.226955} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.044683] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.045823] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aea2341-c72b-4151-b834-1a2f2fcbb1a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.053149] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255763, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.073331] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86291231-e5ca-42b3-b423-01ee6627df17 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.114801] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.115199] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7bc96ee-97dc-4c79-bead-c08ff78a88b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.124745] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 840.124745] env[61545]: value = "task-4255764" [ 840.124745] env[61545]: _type = "Task" [ 840.124745] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.144258] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 840.144606] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.144946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.145228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.145503] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.145850] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6a66039-190a-4ae9-864e-30b0ec473580 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.161142] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.161465] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.162925] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb32d6b-40e0-4b30-bd04-0fdb18c9c8ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.177141] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 840.177141] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d42c29-4cd4-2d88-8013-427c63187905" [ 840.177141] env[61545]: _type = "Task" [ 840.177141] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.188046] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d42c29-4cd4-2d88-8013-427c63187905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.252235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8787b054-00b4-44df-8989-6938469dfd5e tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.352s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.441407] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 840.516846] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64255} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.517334] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ced5bde7-07b9-4d07-8b13-49f6fb006eed/ced5bde7-07b9-4d07-8b13-49f6fb006eed.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.517845] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.518943] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4690129-9a7e-4f3d-9879-126fd1983cac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.532666] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 840.532666] env[61545]: value = "task-4255765" [ 840.532666] env[61545]: _type = "Task" [ 840.532666] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.556740] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255765, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.563941] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255763, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.688606] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d42c29-4cd4-2d88-8013-427c63187905, 'name': SearchDatastore_Task, 'duration_secs': 0.020921} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.689452] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bd854f2-1f2a-449b-a84c-100d6652d82e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.697036] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 840.697036] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529cc9ff-eef4-029b-6908-d33c4b25d09e" [ 840.697036] env[61545]: _type = "Task" [ 840.697036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.702213] env[61545]: DEBUG nova.compute.manager [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.708696] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad9c9bf-4395-439f-8281-2e9fc53a63b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.712629] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Successfully created port: 1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.725988] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529cc9ff-eef4-029b-6908-d33c4b25d09e, 'name': SearchDatastore_Task, 'duration_secs': 0.016005} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.728477] env[61545]: DEBUG oslo_concurrency.lockutils [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.729083] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. 
{{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 840.729264] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4beb3b2-7421-4e88-873a-4536881654a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.743093] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 840.743093] env[61545]: value = "task-4255766" [ 840.743093] env[61545]: _type = "Task" [ 840.743093] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.753896] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.758346] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.050777] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255765, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096697} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.053655] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.053655] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadc3d56-7c1a-4a28-bdb7-6eee0d97be39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.064402] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255763, 'name': ReconfigVM_Task, 'duration_secs': 0.736018} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.065431] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 6f2a4514-4de9-427d-91be-f445235696bf/6f2a4514-4de9-427d-91be-f445235696bf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.066135] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a7a09ca-4bde-445a-902d-2948abebf51f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.091751] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] ced5bde7-07b9-4d07-8b13-49f6fb006eed/ced5bde7-07b9-4d07-8b13-49f6fb006eed.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.095850] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63b25c22-760c-4772-9195-085ea04f24be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.113371] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 841.113371] env[61545]: value = "task-4255767" [ 841.113371] env[61545]: _type = "Task" [ 841.113371] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.122133] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 841.122133] env[61545]: value = "task-4255768" [ 841.122133] env[61545]: _type = "Task" [ 841.122133] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.130344] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255767, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.144545] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255768, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.231394] env[61545]: INFO nova.compute.manager [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] instance snapshotting [ 841.240196] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ef1c99-9027-4d98-849d-67f2fe6a7711 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.274471] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.276307] env[61545]: DEBUG nova.compute.manager [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Received event network-changed-3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 841.276307] env[61545]: DEBUG nova.compute.manager [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Refreshing instance network info cache due to event network-changed-3f0de1dd-63b8-4054-827e-0daae86eaaa5. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 841.276307] env[61545]: DEBUG oslo_concurrency.lockutils [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] Acquiring lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.276307] env[61545]: DEBUG oslo_concurrency.lockutils [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] Acquired lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.276307] env[61545]: DEBUG nova.network.neutron [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Refreshing network info cache for port 3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.285874] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cb0cfbe-3a5e-4928-b872-ae4b195838e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.289630] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2825f88-5a13-4b14-b778-46fe9d5fe7a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.304310] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255766, 'name': 
CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.317383] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 841.317383] env[61545]: value = "task-4255769" [ 841.317383] env[61545]: _type = "Task" [ 841.317383] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.324146] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.332397] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.379115] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e26292-575f-4fd5-b410-58043dbf819b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.389560] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcc4cf2-0b8c-42c1-8b9a-f030aab0e801 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.425979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3200151c-c8b5-4d61-af37-4f6e3d3adea7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.438074] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02edb14b-4dc5-4688-863d-8369e3d3c2bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.455029] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.457843] env[61545]: DEBUG nova.compute.provider_tree [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.510147] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.510928] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.510928] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.511111] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.511489] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.511631] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.511982] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 841.512314] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.512533] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.512804] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 841.513078] env[61545]: DEBUG nova.virt.hardware [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.515124] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf90d23-fda6-4bbd-a3e8-dff15c5af926 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.529732] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e90f4d8-4bfe-473a-8408-30b9f91f938a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.627720] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255767, 'name': Rename_Task, 'duration_secs': 0.296582} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.631758] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.632217] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c97382ef-f363-4270-a777-fe1d02cc7baf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.642886] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255768, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.645992] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 841.645992] env[61545]: value = "task-4255770" [ 841.645992] env[61545]: _type = "Task" [ 841.645992] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.656288] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.754171] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930095} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.754496] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. [ 841.755309] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06545ea6-0888-4e21-a828-ce16363ce1ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.783244] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.783595] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-742d6f8a-d2af-47e4-8a91-eb724b93d6f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.807093] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 841.807093] env[61545]: value = "task-4255771" [ 841.807093] env[61545]: _type = "Task" [ 841.807093] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.819097] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 841.819464] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255771, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.819751] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-011998f5-3f8e-4e22-8947-75b8f993e0cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.833408] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255769, 'name': PowerOffVM_Task, 'duration_secs': 0.464333} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.835419] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.835874] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 841.835874] env[61545]: value = "task-4255772" [ 841.835874] env[61545]: _type = "Task" [ 841.835874] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.836763] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2365fd-1c66-4388-af82-9ad1e918813a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.850765] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255772, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.870283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e694bb6a-d0bd-4733-b281-dbc9664f7f5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.908118] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.908352] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa1e61a1-1400-400b-a91d-331aa5fd480f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.916727] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 841.916727] env[61545]: value = "task-4255773" [ 841.916727] env[61545]: _type = "Task" [ 841.916727] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.929815] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255773, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.961170] env[61545]: DEBUG nova.scheduler.client.report [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.138140] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255768, 'name': ReconfigVM_Task, 'duration_secs': 0.771055} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.138612] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Reconfigured VM instance instance-0000002b to attach disk [datastore2] ced5bde7-07b9-4d07-8b13-49f6fb006eed/ced5bde7-07b9-4d07-8b13-49f6fb006eed.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.139104] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4567943e-9ca3-40c5-ab39-74d6126180c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.161715] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255770, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.163633] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 842.163633] env[61545]: value = "task-4255774" [ 842.163633] env[61545]: _type = "Task" [ 842.163633] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.172974] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255774, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.237945] env[61545]: DEBUG nova.network.neutron [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updated VIF entry in instance network info cache for port 3f0de1dd-63b8-4054-827e-0daae86eaaa5. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.238307] env[61545]: DEBUG nova.network.neutron [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updating instance_info_cache with network_info: [{"id": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "address": "fa:16:3e:a4:0a:73", "network": {"id": "07247a37-d94b-40cb-a352-293d3390a79f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-512329380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ef718e3a64a50b3c40ff4bc29e673", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee018eb-75be-4037-a80a-07034d4eae35", "external-id": "nsx-vlan-transportzone-8", "segmentation_id": 8, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0de1dd-63", "ovs_interfaceid": "3f0de1dd-63b8-4054-827e-0daae86eaaa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.320884] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255771, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.351018] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255772, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.428272] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 842.428501] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.428770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.428921] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.429115] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.429381] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77a3b87c-30ce-452b-9387-2f319b5197d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.440968] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.441185] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.441944] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-192d2f7a-c86f-404a-9544-eb71991c7b7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.448017] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 842.448017] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d98970-a168-1366-da33-216af3237c26" [ 842.448017] env[61545]: _type = "Task" [ 842.448017] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.457270] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d98970-a168-1366-da33-216af3237c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.468327] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.042s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.468910] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 842.471743] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.568s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.471976] env[61545]: DEBUG nova.objects.instance [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lazy-loading 'resources' on Instance uuid a127cc27-7155-4a7a-871a-c3e67a99bfc8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.544778] env[61545]: DEBUG nova.compute.manager [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Received event network-vif-plugged-1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 842.545076] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] Acquiring lock "13db992b-db13-451f-a853-9b7de28b9184-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.546148] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] Lock "13db992b-db13-451f-a853-9b7de28b9184-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.546455] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] Lock "13db992b-db13-451f-a853-9b7de28b9184-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.546900] env[61545]: DEBUG nova.compute.manager [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] No waiting events found dispatching network-vif-plugged-1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.547146] env[61545]: WARNING nova.compute.manager [req-a0ca4750-eb54-4343-a4f1-3a3f360f719d req-13d82269-ca79-4e84-8200-7eb0c826e0ca service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Received unexpected event network-vif-plugged-1db9311b-fde5-4366-81bb-0717451a8c25 for instance with vm_state building and task_state spawning. [ 842.657380] env[61545]: DEBUG oslo_vmware.api [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255770, 'name': PowerOnVM_Task, 'duration_secs': 0.549093} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.657748] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.657816] env[61545]: INFO nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Took 6.51 seconds to spawn the instance on the hypervisor. [ 842.658059] env[61545]: DEBUG nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 842.659016] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cded2b-f639-44dd-a01b-a917f7e8d420 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.686722] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255774, 'name': Rename_Task, 'duration_secs': 0.473961} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.689102] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.689102] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b21a3816-36d9-4515-9cdc-a37656d748b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.694869] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 842.694869] env[61545]: value = "task-4255775" [ 842.694869] env[61545]: _type = "Task" [ 842.694869] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.705850] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.715275] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Successfully updated port: 1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.747399] env[61545]: DEBUG oslo_concurrency.lockutils [req-bb3b656c-4332-42ad-8c1c-8c359b17a76d req-3d371744-1da2-4512-9af7-45a491430ec4 service nova] Releasing lock "refresh_cache-9cf6dd9e-40e9-4df6-9342-2850e0f93d85" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.823884] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255771, 'name': ReconfigVM_Task, 'duration_secs': 0.824692} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.824384] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.825416] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316b639a-bbba-4c7c-98ac-14f0c779a045 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.860028] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20927242-2bca-4e11-958a-8b5df27c1c09 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.881771] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255772, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.881771] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 842.881771] env[61545]: value = "task-4255776" [ 842.881771] env[61545]: _type = "Task" [ 842.881771] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.889679] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255776, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.959840] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d98970-a168-1366-da33-216af3237c26, 'name': SearchDatastore_Task, 'duration_secs': 0.028069} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.960828] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a320ae8-187f-4d68-9cd5-0c2ea09d37c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.970028] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 842.970028] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fcb7fe-d7ef-6baa-6bd0-5b240c89387b" [ 842.970028] env[61545]: _type = "Task" [ 842.970028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.978412] env[61545]: DEBUG nova.compute.utils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 842.983515] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fcb7fe-d7ef-6baa-6bd0-5b240c89387b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.984457] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.984739] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.028513] env[61545]: DEBUG nova.policy [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 843.187040] env[61545]: INFO nova.compute.manager [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Took 44.54 seconds to build instance. [ 843.211396] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.218403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.218609] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.218797] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.360661] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255772, 'name': CreateSnapshot_Task, 'duration_secs': 1.235277} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.360939] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 843.364968] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecca444-6dff-4fb7-811c-a35ce4a955bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.395931] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255776, 'name': ReconfigVM_Task, 'duration_secs': 0.223352} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.400912] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.400912] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-928d58f2-0fa3-4e9d-bdd1-8928c7e7589e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.410027] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 843.410027] env[61545]: value = "task-4255777" [ 843.410027] env[61545]: _type = "Task" [ 843.410027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.419051] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.480416] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fcb7fe-d7ef-6baa-6bd0-5b240c89387b, 'name': SearchDatastore_Task, 'duration_secs': 0.010414} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.481135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.481135] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. {{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 843.481135] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7d17902-bcca-439d-b877-bd32622aeb90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.487564] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 843.499416] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 843.499416] env[61545]: value = "task-4255778" [ 843.499416] env[61545]: _type = "Task" [ 843.499416] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.513294] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255778, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.525179] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Successfully created port: f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.690133] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71349279-cea7-4c9c-aff1-2b9ecf10b625 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.358s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.706585] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.726645] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638cc559-07f0-48d0-a081-d247bf0eda2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.736547] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e87476-dfba-4b10-9c24-4051bbedae59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.769438] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0054331a-adcc-4b6b-a18e-ab9d7df2a399 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.779704] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29eecbd-5328-47d1-afd2-7cc5e1835bb9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.796945] env[61545]: DEBUG nova.compute.provider_tree [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.799228] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.893301] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 843.894413] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2d628058-b2b2-4b51-870f-0f661d2003e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.908159] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 843.908159] env[61545]: value = "task-4255779" [ 843.908159] env[61545]: _type = "Task" [ 843.908159] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.921758] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.925433] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255777, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.013769] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255778, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.167901] env[61545]: DEBUG nova.network.neutron [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Updating instance_info_cache with network_info: [{"id": "1db9311b-fde5-4366-81bb-0717451a8c25", "address": "fa:16:3e:b2:47:cb", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1db9311b-fd", "ovs_interfaceid": "1db9311b-fde5-4366-81bb-0717451a8c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.193262] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.214241] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.303643] env[61545]: DEBUG nova.scheduler.client.report [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.426678] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.426959] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255777, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.507657] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.517042] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255778, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701477} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.517042] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. [ 844.517374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30827824-bafa-4fbb-8907-77e61a9d93fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.548376] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.553033] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54023136-4dcf-4244-aa8b-a33a67bcf6e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.572324] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 844.572324] env[61545]: value = "task-4255780" [ 844.572324] env[61545]: _type = "Task" [ 844.572324] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.574677] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.574677] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.574677] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.574677] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.575025] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.575025] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.575208] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.575396] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.575605] env[61545]: DEBUG nova.virt.hardware [None 
req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.575726] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.575900] env[61545]: DEBUG nova.virt.hardware [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.577096] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe8f5ec-74e1-48d6-ae9b-033b7051c422 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.592941] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.597081] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cf846d-0a69-4401-b1a0-0f23ba497c62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.671294] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.674960] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance network_info: |[{"id": "1db9311b-fde5-4366-81bb-0717451a8c25", "address": "fa:16:3e:b2:47:cb", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1db9311b-fd", "ovs_interfaceid": "1db9311b-fde5-4366-81bb-0717451a8c25", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.674960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:47:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1db9311b-fde5-4366-81bb-0717451a8c25', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.685940] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.686417] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.686760] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10955ed0-a899-49ba-9ccb-cbfaad308701 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.742406] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.747515] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.747515] env[61545]: value = "task-4255781" [ 844.747515] env[61545]: _type = "Task" [ 844.747515] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.755207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.759433] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.813170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.341s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.816079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.025s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.816435] env[61545]: DEBUG nova.objects.instance [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lazy-loading 'resources' on Instance uuid d7ed99e5-3f96-4053-9b9a-a4b7edb1f351 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.843956] env[61545]: INFO nova.scheduler.client.report [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Deleted allocations for instance a127cc27-7155-4a7a-871a-c3e67a99bfc8 [ 844.930686] env[61545]: DEBUG oslo_vmware.api [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255777, 'name': PowerOnVM_Task, 'duration_secs': 1.096251} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.930951] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.931313] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.935471] env[61545]: DEBUG nova.compute.manager [None req-76b87fd5-8d50-4ea8-9d39-20b398190a83 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.936623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be858ee-c61d-41b3-a73e-58b4d2937596 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.032337] env[61545]: DEBUG nova.compute.manager [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Received event network-changed-1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 845.032337] env[61545]: DEBUG nova.compute.manager [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Refreshing instance network info cache due to event network-changed-1db9311b-fde5-4366-81bb-0717451a8c25. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 845.032337] env[61545]: DEBUG oslo_concurrency.lockutils [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] Acquiring lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.032691] env[61545]: DEBUG oslo_concurrency.lockutils [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] Acquired lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.033302] env[61545]: DEBUG nova.network.neutron [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Refreshing network info cache for port 1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.090990] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.226790] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.235951] env[61545]: DEBUG nova.compute.manager [None req-86f98704-11de-4035-9f82-355e63bdea12 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.237108] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9e5064-46ad-4494-9ce4-8d5498ab7951 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.260427] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.354991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04625911-ce02-41fe-9844-53ff0cd3e991 tempest-ServerPasswordTestJSON-1891337064 tempest-ServerPasswordTestJSON-1891337064-project-member] Lock "a127cc27-7155-4a7a-871a-c3e67a99bfc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.556s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.419823] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.588246] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.661745] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "6f2a4514-4de9-427d-91be-f445235696bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.662574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.662574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "6f2a4514-4de9-427d-91be-f445235696bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.662574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.663689] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.668640] env[61545]: INFO nova.compute.manager [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Terminating instance [ 845.727853] env[61545]: DEBUG oslo_vmware.api [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4255775, 'name': PowerOnVM_Task, 'duration_secs': 2.611114} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.728233] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.728723] env[61545]: INFO nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Took 13.98 seconds to spawn the instance on the hypervisor. [ 845.728906] env[61545]: DEBUG nova.compute.manager [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.729991] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e69924-7efb-450d-8721-9c9edc89bcb7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.751586] env[61545]: INFO nova.compute.manager [None req-86f98704-11de-4035-9f82-355e63bdea12 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] instance snapshotting [ 845.751586] env[61545]: DEBUG nova.objects.instance [None req-86f98704-11de-4035-9f82-355e63bdea12 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lazy-loading 'flavor' on Instance uuid 6f2a4514-4de9-427d-91be-f445235696bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.762165] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.925899] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.032280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc582626-ac4c-4cf0-bcb1-825ba82a49dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.041374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdfc296-8be8-4199-9d1d-5fcadf325675 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.076483] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ed22f9-2c83-4f4e-b40e-ab4234af55e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.094460] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.095832] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a3c4ef-0038-4455-8938-979071505f7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.112772] env[61545]: DEBUG nova.compute.provider_tree [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.177049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "refresh_cache-6f2a4514-4de9-427d-91be-f445235696bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.177136] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquired lock "refresh_cache-6f2a4514-4de9-427d-91be-f445235696bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.177318] env[61545]: DEBUG nova.network.neutron [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.237193] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Successfully updated port: f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.250941] env[61545]: INFO nova.compute.manager [None 
req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Took 53.21 seconds to build instance. [ 846.260274] env[61545]: DEBUG nova.network.neutron [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Updated VIF entry in instance network info cache for port 1db9311b-fde5-4366-81bb-0717451a8c25. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 846.260620] env[61545]: DEBUG nova.network.neutron [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Updating instance_info_cache with network_info: [{"id": "1db9311b-fde5-4366-81bb-0717451a8c25", "address": "fa:16:3e:b2:47:cb", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1db9311b-fd", "ovs_interfaceid": "1db9311b-fde5-4366-81bb-0717451a8c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.268552] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b8465b-1a3a-470b-a1bd-bdfe0c597a49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.281943] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.300853] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc48dee-e27f-4ac5-88cd-fad61110937f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.426340] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.590822] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.615273] env[61545]: DEBUG nova.scheduler.client.report [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.711656] env[61545]: DEBUG nova.network.neutron [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.742338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.742338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.742483] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.762712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fceed2b-d8c6-46fe-9364-3f08ef055bc7 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.286s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.763037] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.765207] env[61545]: DEBUG oslo_concurrency.lockutils [req-849ce1c7-e63d-4b97-9e3e-9013b04cd5ab req-c9e42f43-e688-476d-ba5b-a72e952588d2 service nova] Releasing lock "refresh_cache-13db992b-db13-451f-a853-9b7de28b9184" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.808385] env[61545]: DEBUG nova.network.neutron [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.816258] env[61545]: DEBUG nova.compute.manager [None req-86f98704-11de-4035-9f82-355e63bdea12 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance disappeared during snapshot {{(pid=61545) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 846.822087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.822524] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.923404] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.983013] env[61545]: DEBUG nova.compute.manager [None req-86f98704-11de-4035-9f82-355e63bdea12 tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Found 0 images (rotation: 2) {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 847.092884] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.122805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.306s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.124595] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.572s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.126097] env[61545]: INFO nova.compute.claims [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.150687] env[61545]: DEBUG nova.compute.manager [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Received event network-vif-plugged-f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 847.150974] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Acquiring lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.151295] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.151502] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.151628] env[61545]: DEBUG nova.compute.manager [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] No waiting events found dispatching network-vif-plugged-f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.151806] env[61545]: WARNING nova.compute.manager [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Received unexpected event network-vif-plugged-f5ad4055-0195-4342-9493-6b942aab9f3e for instance with vm_state building and task_state 
spawning. [ 847.151970] env[61545]: DEBUG nova.compute.manager [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Received event network-changed-f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 847.153468] env[61545]: DEBUG nova.compute.manager [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Refreshing instance network info cache due to event network-changed-f5ad4055-0195-4342-9493-6b942aab9f3e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 847.153698] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Acquiring lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.158466] env[61545]: INFO nova.scheduler.client.report [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Deleted allocations for instance d7ed99e5-3f96-4053-9b9a-a4b7edb1f351 [ 847.262894] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.266859] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.315991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Releasing lock "refresh_cache-6f2a4514-4de9-427d-91be-f445235696bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.316464] env[61545]: DEBUG nova.compute.manager [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 847.316692] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.318154] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.321066] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18180cae-6c51-48fc-9690-deadb2d8a717 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.330583] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.330880] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f9d96b5-902a-418a-9d04-dba983f43eeb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.339152] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 847.339152] env[61545]: value = "task-4255782" [ 847.339152] env[61545]: _type = "Task" [ 847.339152] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.348303] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.422437] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.564791] env[61545]: INFO nova.compute.manager [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Unrescuing [ 847.565244] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.565498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.565772] env[61545]: DEBUG nova.network.neutron [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.593678] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.630944] env[61545]: DEBUG nova.network.neutron [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Updating instance_info_cache with network_info: [{"id": "f5ad4055-0195-4342-9493-6b942aab9f3e", "address": "fa:16:3e:0e:a9:fa", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5ad4055-01", "ovs_interfaceid": "f5ad4055-0195-4342-9493-6b942aab9f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.668835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d1c13d-c9c1-4e73-b335-92185ef305ac tempest-FloatingIPsAssociationTestJSON-741219197 tempest-FloatingIPsAssociationTestJSON-741219197-project-member] Lock "d7ed99e5-3f96-4053-9b9a-a4b7edb1f351" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.611s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.779149] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.802844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.850823] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255782, 'name': PowerOffVM_Task, 'duration_secs': 0.429189} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.851135] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.851315] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.851608] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b061444f-1dd8-4145-ac4b-d9f12e8b613e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.886188] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.886252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.886458] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Deleting the datastore file [datastore2] 6f2a4514-4de9-427d-91be-f445235696bf {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.886763] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b1c5a99-a7a6-4b3a-b752-a96067f761d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.898064] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for the task: (returnval){ [ 847.898064] env[61545]: value = "task-4255784" [ 847.898064] env[61545]: _type = "Task" [ 847.898064] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.906345] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255784, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.922496] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255779, 'name': CloneVM_Task, 'duration_secs': 3.853309} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.922808] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Created linked-clone VM from snapshot [ 847.923642] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7801daa-af64-49dc-8d96-2fb280914c89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.932278] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Uploading image 09fddafd-8bca-4e4b-a0a8-79b436d4e27e {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 847.962635] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 847.962635] env[61545]: value = "vm-838668" [ 847.962635] env[61545]: _type = "VirtualMachine" [ 847.962635] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 847.963013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-18d6ae97-17c7-4ccd-b4b5-53e59792e285 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.971588] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease: (returnval){ [ 847.971588] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52116b01-34df-8893-4520-d59555c65886" [ 847.971588] env[61545]: _type = "HttpNfcLease" [ 847.971588] env[61545]: } obtained for exporting VM: (result){ [ 847.971588] env[61545]: value = "vm-838668" [ 847.971588] env[61545]: _type = "VirtualMachine" [ 847.971588] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 847.972054] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the lease: (returnval){ [ 847.972054] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52116b01-34df-8893-4520-d59555c65886" [ 847.972054] env[61545]: _type = "HttpNfcLease" [ 847.972054] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 847.981389] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 847.981389] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52116b01-34df-8893-4520-d59555c65886" [ 847.981389] env[61545]: _type = "HttpNfcLease" [ 847.981389] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 848.092943] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255780, 'name': ReconfigVM_Task, 'duration_secs': 3.112122} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.093380] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.094317] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2462415-6aff-4c21-bbce-baf36403006d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.125411] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38b8b89b-1e2e-4b62-9b0e-fa5ddac61b81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.137431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.137761] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Instance network_info: |[{"id": "f5ad4055-0195-4342-9493-6b942aab9f3e", "address": "fa:16:3e:0e:a9:fa", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapf5ad4055-01", "ovs_interfaceid": "f5ad4055-0195-4342-9493-6b942aab9f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 848.138082] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Acquired lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.138703] env[61545]: DEBUG nova.network.neutron [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Refreshing network info cache for port f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.142911] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:a9:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5ad4055-0195-4342-9493-6b942aab9f3e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.152641] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating folder: Project (68a860104885480d9da472bc969ba6d1). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.152641] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05b42150-b1c9-41b3-9185-f19fbc076cc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.161755] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 848.161755] env[61545]: value = "task-4255786" [ 848.161755] env[61545]: _type = "Task" [ 848.161755] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.168040] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created folder: Project (68a860104885480d9da472bc969ba6d1) in parent group-v838542. [ 848.168253] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating folder: Instances. Parent ref: group-v838670. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.168946] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20572e56-86c1-41c2-b761-b5f689b7be52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.176139] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255786, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.187540] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created folder: Instances in parent group-v838670. [ 848.187838] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.188040] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.188269] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58f734de-e16c-46e5-b6e4-6c3ac22bfe99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.214540] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.214540] env[61545]: value = "task-4255789" [ 848.214540] env[61545]: _type = "Task" [ 848.214540] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.230692] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255789, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.270578] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255781, 'name': CreateVM_Task, 'duration_secs': 3.05056} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.270578] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.270578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.270578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.270578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.270578] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f17bbcce-cc34-43cc-bbad-a82e2d4378c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.277320] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 848.277320] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273098c-2998-7548-d0f7-b18486c626c9" [ 848.277320] env[61545]: _type = "Task" [ 848.277320] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.289195] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273098c-2998-7548-d0f7-b18486c626c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.409920] env[61545]: DEBUG oslo_vmware.api [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Task: {'id': task-4255784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190105} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.409920] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.409920] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.409920] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.409920] env[61545]: INFO nova.compute.manager [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Took 1.09 seconds to destroy the instance on the hypervisor. [ 848.409920] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.409920] env[61545]: DEBUG nova.compute.manager [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.409920] env[61545]: DEBUG nova.network.neutron [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.457270] env[61545]: DEBUG nova.network.neutron [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.482637] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 848.482637] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52116b01-34df-8893-4520-d59555c65886" [ 848.482637] env[61545]: _type = "HttpNfcLease" [ 848.482637] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 848.486053] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 848.486053] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52116b01-34df-8893-4520-d59555c65886" [ 848.486053] env[61545]: _type = "HttpNfcLease" [ 848.486053] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 848.487878] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3073a7-32c1-4181-bccb-8f6a7239f50a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.500755] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 848.501034] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 848.647428] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2c4758a9-2e05-427e-a516-3970700ca089 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.673917] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255786, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.728721] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255789, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.790095] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273098c-2998-7548-d0f7-b18486c626c9, 'name': SearchDatastore_Task, 'duration_secs': 0.019029} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.793303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.793791] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.794042] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.794259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.794502] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.795048] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-223e5dc6-55f4-4a0a-8197-6ed7f1b7e5b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.809507] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.809740] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.813768] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f6b5109-50ea-45fd-9292-763a0efcc865 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.819581] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 848.819581] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b0359e-efad-bd77-4276-5ccb3db30ffe" [ 848.819581] env[61545]: _type = "Task" [ 848.819581] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.829200] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b0359e-efad-bd77-4276-5ccb3db30ffe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.851216] env[61545]: DEBUG nova.network.neutron [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [{"id": "ab535fb5-b111-46f9-8c40-e9647f50901b", "address": "fa:16:3e:f5:f9:73", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab535fb5-b1", "ovs_interfaceid": "ab535fb5-b111-46f9-8c40-e9647f50901b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.919151] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1db8812-6309-454e-a4a4-fa55078c79e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.931897] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6c953e-6d36-4af6-8700-2a180faa34d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.967887] env[61545]: DEBUG nova.network.neutron [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Updating 
instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.973911] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829f1508-45eb-4ac0-b771-2d2ce245c300 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.990834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b609ccce-4393-447a-a26f-b595ea16eb92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.009419] env[61545]: DEBUG nova.compute.provider_tree [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.177298] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255786, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.227382] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255789, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.303525] env[61545]: DEBUG nova.network.neutron [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Updated VIF entry in instance network info cache for port f5ad4055-0195-4342-9493-6b942aab9f3e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.303986] env[61545]: DEBUG nova.network.neutron [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Updating instance_info_cache with network_info: [{"id": "f5ad4055-0195-4342-9493-6b942aab9f3e", "address": "fa:16:3e:0e:a9:fa", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5ad4055-01", "ovs_interfaceid": "f5ad4055-0195-4342-9493-6b942aab9f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.335307] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b0359e-efad-bd77-4276-5ccb3db30ffe, 'name': SearchDatastore_Task, 'duration_secs': 0.026391} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.336888] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c904a623-1035-4460-9a4f-2a0ded1945cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.344159] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 849.344159] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52862b84-75df-314c-b442-cdfa22c493cc" [ 849.344159] env[61545]: _type = "Task" [ 849.344159] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.359268] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-16bc91d0-71c3-4bd9-980b-6574c3fd9335" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.360104] env[61545]: DEBUG nova.objects.instance [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'flavor' on Instance uuid 16bc91d0-71c3-4bd9-980b-6574c3fd9335 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.361834] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52862b84-75df-314c-b442-cdfa22c493cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.470911] env[61545]: INFO nova.compute.manager [-] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Took 1.06 seconds to deallocate network for instance. [ 849.512875] env[61545]: DEBUG nova.scheduler.client.report [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.677992] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255786, 'name': ReconfigVM_Task, 'duration_secs': 1.403929} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.678329] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.678604] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1689f985-2432-4e87-8c08-cd42af367f76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.687550] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 849.687550] env[61545]: value = "task-4255790" [ 849.687550] env[61545]: _type = "Task" [ 849.687550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.698075] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.726922] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255789, 'name': CreateVM_Task, 'duration_secs': 1.444365} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.727154] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.727906] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.728084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.728665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 849.728969] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69143f12-6724-43c4-8141-ed64fe0ebded {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
849.734675] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 849.734675] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ccf5b-3123-c93c-87d2-a0d880847a52" [ 849.734675] env[61545]: _type = "Task" [ 849.734675] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.754946] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ccf5b-3123-c93c-87d2-a0d880847a52, 'name': SearchDatastore_Task, 'duration_secs': 0.014971} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.755385] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.755695] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.755983] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.808511] env[61545]: DEBUG oslo_concurrency.lockutils [req-15a90f9f-ba5f-4a3e-babd-e5353df18b9f req-99c35584-4477-4166-b61c-0da406fc7c25 service nova] Releasing lock "refresh_cache-60edf62d-3fb8-4d85-9a4e-ef71c565d940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.860701] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52862b84-75df-314c-b442-cdfa22c493cc, 'name': SearchDatastore_Task, 'duration_secs': 0.01605} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.861200] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.861805] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 13db992b-db13-451f-a853-9b7de28b9184/13db992b-db13-451f-a853-9b7de28b9184.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.862031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.862359] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.862710] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2700c61-82f2-4695-8259-c6b354a6f345 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.868650] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f625556-a8a2-4f82-8553-d329c8ccff07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.874033] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310c75b6-2ff5-4f60-87d1-827aabb46989 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.880779] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 849.880779] env[61545]: value = "task-4255791" [ 849.880779] env[61545]: _type = "Task" [ 849.880779] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.918591] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.919083] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.919564] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.922123] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6da96b72-0290-4522-a586-a1cf0df2a1af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.924455] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3a752df-36a3-4013-887e-58a7bc814da0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.930966] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.935085] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 849.935085] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e835a7-70be-bb01-284d-4d397f99c3ba" [ 849.935085] env[61545]: _type = "Task" [ 849.935085] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.940162] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 849.940162] env[61545]: value = "task-4255792" [ 849.940162] env[61545]: _type = "Task" [ 849.940162] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.948144] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e835a7-70be-bb01-284d-4d397f99c3ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.954019] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.982364] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.026237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.902s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.027296] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 850.030469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.627s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.032861] env[61545]: INFO nova.compute.claims [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.109476] env[61545]: DEBUG nova.compute.manager [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 850.110089] env[61545]: DEBUG nova.compute.manager [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing instance network info cache due to event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 850.110089] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Acquiring lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.110089] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Acquired lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.110624] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.200809] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.396610] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255791, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.453879] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e835a7-70be-bb01-284d-4d397f99c3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.014638} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.454496] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30862c9f-f6dc-4988-8249-a06af2faeda8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.464159] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255792, 'name': PowerOffVM_Task, 'duration_secs': 0.276314} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.467027] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.472750] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfiguring VM instance instance-00000023 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 850.473355] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 850.473355] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e10b70-9fdd-fe5e-5a47-94b6ee50f578" [ 850.473355] env[61545]: _type = "Task" [ 850.473355] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.473749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f11b3247-8f62-47ea-b0f4-04f3c06c2d26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.504051] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e10b70-9fdd-fe5e-5a47-94b6ee50f578, 'name': SearchDatastore_Task, 'duration_secs': 0.025168} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.505818] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.506239] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 60edf62d-3fb8-4d85-9a4e-ef71c565d940/60edf62d-3fb8-4d85-9a4e-ef71c565d940.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.506714] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 850.506714] env[61545]: value = "task-4255793" [ 850.506714] env[61545]: _type = "Task" [ 850.506714] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.506936] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68251b72-3897-4cb7-bdfd-307856b4dd71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.518956] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255793, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.520732] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 850.520732] env[61545]: value = "task-4255794" [ 850.520732] env[61545]: _type = "Task" [ 850.520732] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.534329] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.541146] env[61545]: DEBUG nova.compute.utils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 850.546117] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 850.709589] env[61545]: DEBUG oslo_vmware.api [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255790, 'name': PowerOnVM_Task, 'duration_secs': 0.695096} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.709883] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.713931] env[61545]: DEBUG nova.compute.manager [None req-d7114bb8-0cd9-454d-a20b-cf3969eadca8 tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.714984] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05503b4-ed4e-4bd8-bdc6-673ab7da7e46 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.894913] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255791, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624767} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.895307] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 13db992b-db13-451f-a853-9b7de28b9184/13db992b-db13-451f-a853-9b7de28b9184.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.895729] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.898827] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7261d507-fd0b-4b0a-8767-1812bffece59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.908139] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 850.908139] env[61545]: value = "task-4255795" [ 850.908139] env[61545]: _type = "Task" [ 850.908139] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.918384] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255795, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.020392] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255793, 'name': ReconfigVM_Task, 'duration_secs': 0.319054} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.020831] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Reconfigured VM instance instance-00000023 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 851.021052] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.021944] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6448172e-4dd1-425c-9b52-ad46eaf98e9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.042780] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255794, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.045234] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 851.045234] env[61545]: value = "task-4255796" [ 851.045234] env[61545]: _type = "Task" [ 851.045234] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.051954] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 851.067693] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255796, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.070977] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updated VIF entry in instance network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.070977] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updating instance_info_cache with network_info: [{"id": "16667e69-57e6-426e-8b7e-0da6159f84bb", "address": "fa:16:3e:10:6f:d5", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16667e69-57", "ovs_interfaceid": "16667e69-57e6-426e-8b7e-0da6159f84bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.420386] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187907} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.423662] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.424861] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3a12cc-17a3-4802-a425-36fd67324b34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.453486] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 13db992b-db13-451f-a853-9b7de28b9184/13db992b-db13-451f-a853-9b7de28b9184.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.457925] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65c63513-af53-4a89-baee-7691328ea6c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.481451] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 851.481451] env[61545]: value = "task-4255797" [ 851.481451] env[61545]: _type = "Task" [ 851.481451] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.505848] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255797, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.535115] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255794, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.558167] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255796, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.577250] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Releasing lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.577772] env[61545]: DEBUG nova.compute.manager [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 851.578077] env[61545]: DEBUG nova.compute.manager [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing instance network info cache due to event network-changed-16667e69-57e6-426e-8b7e-0da6159f84bb. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 851.578414] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Acquiring lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.578620] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Acquired lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.578758] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Refreshing network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.884219] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527c5133-de34-400e-892f-3149b1b654ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.892912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc138ed9-9b43-4e1e-bc9d-a85ee2b6656a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.927808] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519b5c7b-dc19-4912-a62a-d26e6f416f22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.937191] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87aa6c95-3fb6-44bd-87d6-4f5a380a585e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.951996] env[61545]: DEBUG nova.compute.provider_tree [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed in ProviderTree for provider: 
7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.968796] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.969194] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.996606] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.034178] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255794, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.056700] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255796, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.066901] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 852.100166] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 852.100166] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.100166] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 852.100166] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.101180] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 852.101510] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 852.101876] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 852.102221] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 852.102576] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 
tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 852.102905] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 852.103242] env[61545]: DEBUG nova.virt.hardware [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 852.105154] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10d3fc4-accd-4d76-92e3-2bba6c6aeded {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.119076] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6965c7c-6efa-46ba-a48d-6ccda339070b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.139953] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.146119] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Creating folder: Project (528f410f2aa345de8097844c3114eb38). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.146933] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ff7786d-78e8-44c1-8ec8-bd944ec87d6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.160226] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Created folder: Project (528f410f2aa345de8097844c3114eb38) in parent group-v838542. [ 852.160226] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Creating folder: Instances. Parent ref: group-v838673. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.160226] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08870957-3644-4516-bde7-9f7ab213ef58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.171347] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Created folder: Instances in parent group-v838673. 
[ 852.171653] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.171917] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.172324] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d299cccb-6b15-4512-84dd-eee6d14be1bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.193511] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.193511] env[61545]: value = "task-4255800" [ 852.193511] env[61545]: _type = "Task" [ 852.193511] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.203190] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255800, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.456656] env[61545]: DEBUG nova.scheduler.client.report [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.506341] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.530657] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updated VIF entry in instance network info cache for port 16667e69-57e6-426e-8b7e-0da6159f84bb. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.531178] env[61545]: DEBUG nova.network.neutron [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updating instance_info_cache with network_info: [{"id": "16667e69-57e6-426e-8b7e-0da6159f84bb", "address": "fa:16:3e:10:6f:d5", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16667e69-57", "ovs_interfaceid": "16667e69-57e6-426e-8b7e-0da6159f84bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.541101] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255794, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.600827} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.541558] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 60edf62d-3fb8-4d85-9a4e-ef71c565d940/60edf62d-3fb8-4d85-9a4e-ef71c565d940.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.541836] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.542390] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-626943c0-f76e-448d-993e-67ab357ecae8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.556152] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 852.556152] env[61545]: value = "task-4255801" [ 852.556152] env[61545]: _type = "Task" [ 852.556152] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.566675] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255796, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.581791] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255801, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.704548] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255800, 'name': CreateVM_Task, 'duration_secs': 0.385039} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.704767] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.705376] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.705459] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.708984] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 852.708984] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-875daee1-0d32-4cd7-8887-2431bb60596f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.712199] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 852.712199] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f3775-fac0-e950-b5ce-ecdbf7f2fb2a" [ 852.712199] env[61545]: _type = "Task" [ 852.712199] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.727520] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f3775-fac0-e950-b5ce-ecdbf7f2fb2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.967046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.936s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.967672] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 852.977341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.372s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.977628] env[61545]: INFO nova.compute.claims [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.996275] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255797, 'name': ReconfigVM_Task, 'duration_secs': 1.12817} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.996591] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 13db992b-db13-451f-a853-9b7de28b9184/13db992b-db13-451f-a853-9b7de28b9184.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.998863] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5ebd566-03c1-4811-a633-db95386980ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.009396] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 853.009396] env[61545]: value = "task-4255802" [ 853.009396] env[61545]: _type = "Task" [ 853.009396] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.021466] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255802, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.035476] env[61545]: DEBUG oslo_concurrency.lockutils [req-8504e4d2-3d32-4654-af96-de4056f20be0 req-a58494d1-ac74-4823-b2d9-1ed976903f00 service nova] Releasing lock "refresh_cache-ced5bde7-07b9-4d07-8b13-49f6fb006eed" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.058359] env[61545]: DEBUG oslo_vmware.api [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4255796, 'name': PowerOnVM_Task, 'duration_secs': 1.603895} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.062041] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.062689] env[61545]: DEBUG nova.compute.manager [None req-cc50baa1-a1c6-4635-b8ea-1121785a5b81 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.064017] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11693cc4-7c48-4a76-9668-8b92e7fb64c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.075901] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086657} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.078032] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.079864] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726e9ead-a275-4d1b-89ff-c50f53caa5f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.103931] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 60edf62d-3fb8-4d85-9a4e-ef71c565d940/60edf62d-3fb8-4d85-9a4e-ef71c565d940.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.104259] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af1899f-2994-47f4-ad2e-695b3be3e9ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.127595] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 853.127595] env[61545]: value = "task-4255803" [ 853.127595] env[61545]: _type = "Task" [ 853.127595] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.137676] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255803, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.227422] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f3775-fac0-e950-b5ce-ecdbf7f2fb2a, 'name': SearchDatastore_Task, 'duration_secs': 0.019654} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.229061] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.229061] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.230776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.230981] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.231336] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.231822] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc3e7501-2743-4799-97b1-2849bb1720c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.244181] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.244541] env[61545]: 
DEBUG nova.virt.vmwareapi.vmops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.245589] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98725fc2-0a71-42e7-9101-28b92ea949c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.256848] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 853.256848] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae8226-ee98-11a7-03c9-a66c8bc0e6e1" [ 853.256848] env[61545]: _type = "Task" [ 853.256848] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.268507] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae8226-ee98-11a7-03c9-a66c8bc0e6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.485234] env[61545]: DEBUG nova.compute.utils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 853.490302] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 853.503691] env[61545]: DEBUG nova.compute.manager [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 853.503955] env[61545]: DEBUG nova.compute.manager [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing instance network info cache due to event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 853.504950] env[61545]: DEBUG oslo_concurrency.lockutils [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.505300] env[61545]: DEBUG oslo_concurrency.lockutils [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.505614] env[61545]: DEBUG nova.network.neutron [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.523481] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255802, 'name': Rename_Task, 'duration_secs': 0.272971} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.523966] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.524376] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe73139-ff37-42de-b342-52abdc5e5340 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.534013] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 853.534013] env[61545]: value = "task-4255804" [ 853.534013] env[61545]: _type = "Task" [ 853.534013] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.547062] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.646194] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255803, 'name': ReconfigVM_Task, 'duration_secs': 0.448078} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.646194] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 60edf62d-3fb8-4d85-9a4e-ef71c565d940/60edf62d-3fb8-4d85-9a4e-ef71c565d940.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.646730] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be6e1854-a2bc-4692-8234-2551706c17fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.655739] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 853.655739] env[61545]: value = "task-4255805" [ 853.655739] env[61545]: _type = "Task" [ 853.655739] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.666292] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255805, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.768697] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae8226-ee98-11a7-03c9-a66c8bc0e6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.014291} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.770132] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dd97be3-92c3-42d9-8907-0b841722932f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.778244] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 853.778244] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f518df-6dca-d4bd-dcdf-788d06542b83" [ 853.778244] env[61545]: _type = "Task" [ 853.778244] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.788913] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f518df-6dca-d4bd-dcdf-788d06542b83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.990932] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 854.053916] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255804, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.171310] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255805, 'name': Rename_Task, 'duration_secs': 0.182534} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.171610] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.171957] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-059e3f57-7372-4479-befa-7fa7e3588ea6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.184256] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 854.184256] env[61545]: value = "task-4255806" [ 854.184256] env[61545]: _type = "Task" [ 854.184256] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.199136] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255806, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.293344] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f518df-6dca-d4bd-dcdf-788d06542b83, 'name': SearchDatastore_Task, 'duration_secs': 0.01562} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.298675] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.299305] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 69c59bd5-1f57-4fa2-afab-348e5f57501e/69c59bd5-1f57-4fa2-afab-348e5f57501e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.299982] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a2ddab9-2c3c-48a3-bea8-ddc971db9088 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.309565] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 854.309565] env[61545]: value = "task-4255807" [ 854.309565] env[61545]: _type = "Task" [ 854.309565] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.320612] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.431263] env[61545]: DEBUG nova.network.neutron [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updated VIF entry in instance network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.431263] env[61545]: DEBUG nova.network.neutron [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.554901] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255804, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.704769] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255806, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.800680] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e160be8e-5954-42db-8d01-8b1996fb6930 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.816094] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49e4dac-6ae1-4532-8bd6-142b0f5970d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.854805] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecfc171-4059-4cfc-a6e8-cb09eaeca915 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.857715] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255807, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.864990] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e183a6-a0e4-4036-9434-6db230bc8688 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.881347] env[61545]: DEBUG nova.compute.provider_tree [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.933596] env[61545]: DEBUG oslo_concurrency.lockutils [req-f23c40c9-c648-4921-a8ff-c1a7d99fa1c1 req-55b296cc-d3d5-4e34-96f7-e80447388c9b service nova] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.009078] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 855.033461] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 855.033773] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.033977] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 855.035104] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.035575] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 
tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 855.036129] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 855.036495] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 855.036737] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 855.036894] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 855.037199] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 855.037610] env[61545]: DEBUG nova.virt.hardware [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 855.038946] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91668589-cc51-42de-a8b0-ddbe403c9cee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.054503] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ae7fac-faf6-4827-96be-2008fe5c4057 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.059425] env[61545]: DEBUG oslo_vmware.api [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255804, 'name': PowerOnVM_Task, 'duration_secs': 1.146744} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.059768] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.060233] env[61545]: INFO nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Took 13.60 seconds to spawn the instance on the hypervisor. [ 855.060312] env[61545]: DEBUG nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.062440] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbb2c3f-0b91-4085-8974-247974650aa8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.076805] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.082793] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.083746] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 855.083976] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e0d5e33-b922-4ca0-a59f-cb7e532ef366 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.107897] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.107897] env[61545]: value = "task-4255808" [ 855.107897] env[61545]: _type = "Task" [ 855.107897] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.120640] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255808, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.198055] env[61545]: DEBUG oslo_vmware.api [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255806, 'name': PowerOnVM_Task, 'duration_secs': 0.94638} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.198416] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.198627] env[61545]: INFO nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Took 10.69 seconds to spawn the instance on the hypervisor. [ 855.198796] env[61545]: DEBUG nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.199705] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358b8cc2-ba2d-40e1-90fc-519268a759bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.327043] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713114} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.327043] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 69c59bd5-1f57-4fa2-afab-348e5f57501e/69c59bd5-1f57-4fa2-afab-348e5f57501e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.327043] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.327043] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee3c5c27-a108-40eb-bfd2-8e49803dcefa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.333819] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 855.333819] env[61545]: value = "task-4255809" [ 855.333819] env[61545]: _type = "Task" [ 855.333819] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.345150] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.385773] env[61545]: DEBUG nova.scheduler.client.report [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.615459] env[61545]: INFO nova.compute.manager [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Took 48.38 seconds to build instance. [ 855.623768] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255808, 'name': CreateVM_Task, 'duration_secs': 0.347499} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.623992] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.624609] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.624778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.625243] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 855.625531] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d7eface-c743-49bf-b30c-c46d6ba329f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.633677] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 
tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 855.633677] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233c840-1a0c-30dd-670b-b4d8d7e072d0" [ 855.633677] env[61545]: _type = "Task" [ 855.633677] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.649053] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233c840-1a0c-30dd-670b-b4d8d7e072d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.723749] env[61545]: INFO nova.compute.manager [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Took 46.39 seconds to build instance. [ 855.844117] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095341} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.844485] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.845366] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79001265-b879-433b-9b45-90ed022d123a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.879850] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 69c59bd5-1f57-4fa2-afab-348e5f57501e/69c59bd5-1f57-4fa2-afab-348e5f57501e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.886024] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-429b97a0-f702-43c0-b534-25a430d222f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.906025] env[61545]: DEBUG nova.compute.manager [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 855.906232] env[61545]: DEBUG nova.compute.manager [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing instance network info cache due to 
event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 855.907135] env[61545]: DEBUG oslo_concurrency.lockutils [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.907320] env[61545]: DEBUG oslo_concurrency.lockutils [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.907557] env[61545]: DEBUG nova.network.neutron [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.910229] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.935s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.910736] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 855.914366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.675s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.916231] env[61545]: INFO nova.compute.claims [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.929861] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 855.929861] env[61545]: value = "task-4255810" [ 855.929861] env[61545]: _type = "Task" [ 855.929861] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.941825] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.118578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8115ca42-5f53-456e-9e4c-7b5a57246e89 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.581s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.151484] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233c840-1a0c-30dd-670b-b4d8d7e072d0, 'name': SearchDatastore_Task, 'duration_secs': 0.015232} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.151847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.152103] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.152495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.152688] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.152881] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.153206] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90af4c21-6c52-4540-8a82-bd01dd998d6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.164547] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.164760] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.165475] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7e7b2d0-1eca-48f0-ab1c-cf297ca19407 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.173196] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 856.173196] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297f7e1-923f-cb96-b3df-7b271eb860c2" [ 856.173196] env[61545]: _type = "Task" [ 856.173196] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.184098] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297f7e1-923f-cb96-b3df-7b271eb860c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.227064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-91455120-6cf3-482c-b6d6-64230605969a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.932s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.359303] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ae7563-dc4c-4a3a-ac64-6c7010ad63b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.366691] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Suspending the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 856.367261] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-97e00f19-801b-410f-b125-ce2bad83eeeb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.378027] env[61545]: DEBUG oslo_vmware.api [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 856.378027] env[61545]: value = "task-4255811" [ 856.378027] env[61545]: _type = "Task" [ 856.378027] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.385893] env[61545]: DEBUG oslo_vmware.api [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255811, 'name': SuspendVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.418186] env[61545]: DEBUG nova.compute.utils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 856.418186] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 856.418186] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.443124] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255810, 'name': ReconfigVM_Task, 'duration_secs': 0.359063} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.445460] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 69c59bd5-1f57-4fa2-afab-348e5f57501e/69c59bd5-1f57-4fa2-afab-348e5f57501e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.445460] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c9fd074-ef15-44a8-a258-3c530260e939 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.456041] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 856.456041] env[61545]: value = "task-4255812" [ 856.456041] env[61545]: _type = "Task" [ 856.456041] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.465317] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255812, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.538847] env[61545]: DEBUG nova.policy [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '569bd13278d748da986eed7c1f1ba18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '826b68db604949ea9d17124d5a2ecefb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 856.622585] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 856.690568] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5297f7e1-923f-cb96-b3df-7b271eb860c2, 'name': SearchDatastore_Task, 'duration_secs': 0.015812} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.695028] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe1c3bb4-31d2-49e0-9522-00224e7e0b24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.703643] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 856.703643] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb453e-8ef1-f440-9d1f-23ffdd1e7c32" [ 856.703643] env[61545]: _type = "Task" [ 856.703643] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.719845] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb453e-8ef1-f440-9d1f-23ffdd1e7c32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.729029] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.696601] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 857.706867] env[61545]: DEBUG oslo_vmware.api [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255811, 'name': SuspendVM_Task} progress is 62%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.707138] env[61545]: WARNING oslo_vmware.common.loopingcall [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] task run outlasted interval by 0.330925 sec [ 857.708027] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Successfully created port: 3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.739731] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255812, 'name': Rename_Task, 'duration_secs': 0.231336} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.744985] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.746064] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb453e-8ef1-f440-9d1f-23ffdd1e7c32, 'name': SearchDatastore_Task, 'duration_secs': 0.055802} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.746247] env[61545]: DEBUG oslo_vmware.api [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255811, 'name': SuspendVM_Task, 'duration_secs': 1.024323} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.746964] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba3ae965-1776-46f1-9690-5238ee675ff6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.749481] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.755336] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 857.755336] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Suspended the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 857.755336] env[61545]: DEBUG nova.compute.manager [None req-4a1b8a6c-4888-4164-b1d5-9329c795dc74 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 857.755336] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e9fc1b8-edbf-4f55-966a-b8039284dad5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.755336] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0418bd2c-fdd4-4bbe-a2b2-e1fbd78908db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.759874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.774025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.779605] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: 
(returnval){ [ 857.779605] env[61545]: value = "task-4255814" [ 857.779605] env[61545]: _type = "Task" [ 857.779605] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.779605] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 857.779605] env[61545]: value = "task-4255813" [ 857.779605] env[61545]: _type = "Task" [ 857.779605] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.804767] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255813, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.805350] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.891623] env[61545]: DEBUG nova.network.neutron [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updated VIF entry in instance network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.892176] env[61545]: DEBUG nova.network.neutron [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.227109] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 
tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.228346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.228346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.228346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.228346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.231468] env[61545]: INFO nova.compute.manager [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Terminating instance [ 858.306554] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255814, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.313086] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255813, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.404662] env[61545]: DEBUG oslo_concurrency.lockutils [req-7cf8ae2f-342f-40d8-badc-fe0559ab7b94 req-9939fb60-bcb4-445c-89ff-d1280d14a6f8 service nova] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.558908] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c9db48-3bfa-4a9f-844f-09deb2e55279 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.573567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d573843b-2023-405b-a6a1-86899dc38083 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.613625] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebcb22a-69ec-4cf2-b6e7-dc0b229c7acb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.623335] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e7aa9a-8195-4e43-95ab-218803156899 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.644376] env[61545]: DEBUG nova.compute.provider_tree [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.720976] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 858.736944] env[61545]: DEBUG nova.compute.manager [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.737275] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.738292] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4f2dc8-d50e-4612-88cf-67abce93c984 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.750047] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.750437] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06a4ae00-c16e-48d2-b10c-ad82a3992228 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.757161] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 858.757832] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.758287] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 858.758512] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.758665] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 858.759071] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 858.759071] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 858.759226] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 858.759392] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 858.759575] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 858.760640] env[61545]: DEBUG nova.virt.hardware [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 858.761141] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5bc8f6-ccce-4a2f-8582-5d854b2d4aa6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.767146] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 858.767146] env[61545]: value = "task-4255815" [ 858.767146] env[61545]: _type = "Task" [ 858.767146] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.783278] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7598f474-fe25-4664-8e17-2eaa3b50f698 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.791320] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255815, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.805469] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722215} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.820024] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.820024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.820024] env[61545]: DEBUG oslo_vmware.api [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255813, 'name': PowerOnVM_Task, 'duration_secs': 0.821752} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.820411] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b56fc6bc-912c-41f6-8d61-0549d23f8596 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.823309] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.823421] env[61545]: INFO nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Took 6.76 seconds to spawn the instance on the hypervisor. 
[ 858.823557] env[61545]: DEBUG nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.824394] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5416ae-67db-4cf8-8986-3065275afa0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.838506] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 858.838506] env[61545]: value = "task-4255816" [ 858.838506] env[61545]: _type = "Task" [ 858.838506] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.848490] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255816, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.172959] env[61545]: ERROR nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [req-27c8287b-d646-4771-b918-97e3db7b9ba7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-27c8287b-d646-4771-b918-97e3db7b9ba7"}]} [ 859.190272] env[61545]: DEBUG nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 859.217459] env[61545]: DEBUG nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 859.217459] env[61545]: DEBUG nova.compute.provider_tree [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 859.232060] env[61545]: DEBUG nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 859.259817] env[61545]: DEBUG nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 859.280963] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255815, 'name': PowerOffVM_Task, 'duration_secs': 0.233973} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.281583] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.281583] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.281703] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eae6dcca-0c90-4662-9692-e6c3d8006bb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.354225] env[61545]: INFO nova.compute.manager [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Took 33.82 seconds to build instance. [ 859.360410] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255816, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074326} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.366374] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.366634] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.366970] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.367392] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] 60edf62d-3fb8-4d85-9a4e-ef71c565d940 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.368706] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3ec2df-d42e-485a-94ce-ab398d6520fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.371913] env[61545]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4097fc7c-3325-451d-bab7-8cad1e2c0894 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.395865] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.405209] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d238481c-7ee1-4d17-8aff-89cc0d7b8932 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.427231] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 859.427231] env[61545]: value = "task-4255818" [ 859.427231] env[61545]: _type = "Task" [ 859.427231] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.437252] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 859.437252] env[61545]: value = "task-4255819" [ 859.437252] env[61545]: _type = "Task" [ 859.437252] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.450209] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.458928] env[61545]: DEBUG nova.compute.manager [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 859.459150] env[61545]: DEBUG nova.compute.manager [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing instance network info cache due to event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 859.459908] env[61545]: DEBUG oslo_concurrency.lockutils [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.459908] env[61545]: DEBUG oslo_concurrency.lockutils [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.460166] env[61545]: DEBUG nova.network.neutron [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.472496] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.472710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.472927] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255819, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.855960] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.856225] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.867361] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9e8b9257-c790-44fe-979e-ea58f5a7b613 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.539s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.942388] env[61545]: DEBUG oslo_vmware.api [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4255818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364052} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.946582] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.946850] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.947051] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.947232] env[61545]: INFO nova.compute.manager [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 859.947475] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.948414] env[61545]: DEBUG nova.compute.manager [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.948414] env[61545]: DEBUG nova.network.neutron [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.958318] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255819, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.037729] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Successfully updated port: 3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.108045] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3726622-210d-4acc-ae9e-fcc42742c898 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.119533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea64b82-1905-4722-8865-46b24db879f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.159541] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3385ce24-57e3-4747-a667-bd52c536e95a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.170381] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b6fdd1-3fe8-4f50-9f3b-7b32b61cfd27 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.184673] env[61545]: DEBUG nova.compute.provider_tree [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.361314] env[61545]: DEBUG nova.compute.utils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.371252] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 
tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.393101] env[61545]: DEBUG nova.network.neutron [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updated VIF entry in instance network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.393489] env[61545]: DEBUG nova.network.neutron [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.459949] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255819, 'name': ReconfigVM_Task, 'duration_secs': 0.556393} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.459949] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.461124] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ab8c8f8-abc2-4b18-850b-3927ede03d0a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.469089] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 860.469089] env[61545]: value = "task-4255820" [ 860.469089] env[61545]: _type = "Task" [ 860.469089] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.478946] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255820, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.541540] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.541540] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquired lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.541540] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.590228] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 860.592232] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c506e20-985c-4645-9043-325c40483149 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.600601] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 860.600806] env[61545]: ERROR oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk due to incomplete transfer. [ 860.601081] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-40b6d492-a4d4-4a12-a13f-02d72b7ecbdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.614548] env[61545]: DEBUG oslo_vmware.rw_handles [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d9c38b-b3f7-2b3a-872f-767a899d06d1/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 860.614548] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Uploaded image 09fddafd-8bca-4e4b-a0a8-79b436d4e27e to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 860.616085] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 860.616954] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-61ab669f-6747-4263-a93d-278ca6382660 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.626637] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 860.626637] env[61545]: value = "task-4255821" [ 860.626637] env[61545]: _type = "Task" [ 860.626637] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.639056] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255821, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.689038] env[61545]: DEBUG nova.scheduler.client.report [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.864059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.896946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.897557] env[61545]: DEBUG oslo_concurrency.lockutils [req-f606a924-4b8d-45ff-a859-4e25139477b8 req-7b3b65bb-8410-4d02-84d9-60a51818d40a service nova] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.955031] env[61545]: DEBUG nova.network.neutron [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.981234] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255820, 'name': Rename_Task, 'duration_secs': 0.18843} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.982037] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.982428] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a8bb6b3-ad68-4ed8-81dd-fb8c337bb289 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.991726] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 860.991726] env[61545]: value = "task-4255822" [ 860.991726] env[61545]: _type = "Task" [ 860.991726] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.003018] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255822, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.080409] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.141457] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255821, 'name': Destroy_Task, 'duration_secs': 0.41915} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.142059] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Destroyed the VM [ 861.142424] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 861.142658] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bf541345-bbae-4ac7-b9e4-d5fe9d165208 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.151496] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 861.151496] env[61545]: value = "task-4255823" [ 861.151496] env[61545]: _type = "Task" [ 861.151496] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.162209] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255823, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.193839] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.279s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.194443] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 861.197561] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.008s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.198312] env[61545]: DEBUG nova.objects.instance [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lazy-loading 'resources' on Instance uuid fa08b76f-d64d-46e9-9865-1ab2e9b1d823 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.299408] env[61545]: DEBUG nova.network.neutron [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Updating instance_info_cache with network_info: [{"id": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "address": "fa:16:3e:37:39:05", "network": {"id": "418e3b5d-5958-4b9f-9e61-7e16e8c77f97", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-333428270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "826b68db604949ea9d17124d5a2ecefb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f455c93-7d", "ovs_interfaceid": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.458528] env[61545]: INFO nova.compute.manager [-] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Took 1.51 seconds to deallocate network for instance. [ 861.506452] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255822, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.662544] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255823, 'name': RemoveSnapshot_Task} progress is 74%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.702393] env[61545]: DEBUG nova.compute.utils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 861.709578] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 861.710607] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.802471] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Releasing lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.802471] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Instance network_info: |[{"id": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "address": "fa:16:3e:37:39:05", "network": {"id": "418e3b5d-5958-4b9f-9e61-7e16e8c77f97", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-333428270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "826b68db604949ea9d17124d5a2ecefb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f455c93-7d", "ovs_interfaceid": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 861.802749] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:39:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f969bd9-e040-4b9b-85b2-7c61231584ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '3f455c93-7dce-4d43-b98a-ed7d25682f00', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.812209] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Creating folder: Project (826b68db604949ea9d17124d5a2ecefb). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.815284] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-187016ac-4a9a-4a90-b135-7dfe6785830a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.828980] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Created folder: Project (826b68db604949ea9d17124d5a2ecefb) in parent group-v838542. [ 861.829201] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Creating folder: Instances. Parent ref: group-v838677. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.832371] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8fb000b-0ed0-4cd0-83a9-55536b873762 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.835972] env[61545]: DEBUG nova.policy [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de2ff39939bd440b8df0819c626fc2ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f56d2d605ffd4d098959105ab53d9803', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 861.848902] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Created folder: Instances in parent group-v838677. [ 861.849215] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.849433] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.849662] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02fe8faa-7360-4b28-abd5-01320b9cf5fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.880950] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.880950] env[61545]: value = "task-4255826" [ 861.880950] env[61545]: _type = "Task" [ 861.880950] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.893019] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255826, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.966312] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.966621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.966864] env[61545]: INFO nova.compute.manager [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Attaching volume ce082a16-643e-421e-a475-8e5507ec6ab0 to /dev/sdb [ 861.975915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.006801] env[61545]: DEBUG oslo_vmware.api [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255822, 'name': PowerOnVM_Task, 'duration_secs': 0.60636} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.006801] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.006801] env[61545]: INFO nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Took 7.00 seconds to spawn the instance on the hypervisor. [ 862.006801] env[61545]: DEBUG nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.007034] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3ecbfe-b6c8-42f1-a7f6-dcb65c46e7fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.021406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469be06a-982f-4d1b-8d42-7cc61f574f2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.030366] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f786104-933e-42f5-9f19-e59fac1187fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.048086] env[61545]: DEBUG nova.virt.block_device [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updating existing volume attachment record: 00584012-f4e5-4124-b86a-133b68a92b18 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 862.166835] env[61545]: DEBUG oslo_vmware.api [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255823, 'name': RemoveSnapshot_Task, 'duration_secs': 0.795257} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.167171] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 862.167418] env[61545]: INFO nova.compute.manager [None req-535b5d11-bc1c-4d43-a258-bac8046d2a1e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 20.93 seconds to snapshot the instance on the hypervisor. 
[ 862.210479] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 862.241569] env[61545]: DEBUG nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.242701] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6755e1b3-73f2-45c2-a443-339f78f777d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.358723] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 862.358918] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing instance network info cache due to event network-changed-a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 862.359158] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Acquiring lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.359288] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Acquired lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.359444] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Refreshing network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.394463] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255826, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.442068] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Successfully created port: fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.485176] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0947b8a0-066b-43ed-b19b-28db52fccffc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.498884] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9af2f4-ff7f-4be1-9766-5ffb38e85607 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.544687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90593387-9a9e-4139-8988-aa623a7d29b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.551068] env[61545]: INFO nova.compute.manager [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Took 34.17 seconds to build instance. [ 862.560053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54488dfb-f898-486a-ac48-582d989779cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.578749] env[61545]: DEBUG nova.compute.provider_tree [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.757102] env[61545]: INFO nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] instance snapshotting [ 862.757346] env[61545]: WARNING nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 862.761020] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bc06b4-4f18-4a0a-92a9-a7f4bfea56f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.784070] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d28c05f-0369-4499-995e-60c33ccfe140 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.893668] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255826, 'name': CreateVM_Task, 'duration_secs': 0.693315} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.893909] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.894624] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.894802] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.895135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.895409] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fac19b0-d24d-48e9-85a3-64f112b578c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.901896] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 862.901896] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f6426-2700-b997-2ff4-1fa41c381d91" [ 862.901896] env[61545]: _type = "Task" [ 862.901896] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.918301] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f6426-2700-b997-2ff4-1fa41c381d91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.062265] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ea78d035-cc15-419d-9565-399e887e5e58 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.256s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.082208] env[61545]: DEBUG nova.scheduler.client.report [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.221782] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.260278] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0ed6a45a-0c2d-43c8-94d3-0da3debac597',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1743994676',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.260534] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.260723] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.260917] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 
tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.261463] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.261463] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.261622] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.261699] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 863.261872] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.262108] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.262288] env[61545]: DEBUG nova.virt.hardware [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.263223] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3bb074-c1e0-420f-ab74-c00e06f3a4f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.274040] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067b61fe-08c8-4559-9850-630d151a521d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.298182] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 863.298493] env[61545]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-babdf03f-b549-45d9-a7aa-81df94e98cb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.306704] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 863.306704] env[61545]: value = "task-4255830" [ 863.306704] env[61545]: _type = "Task" [ 863.306704] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.321487] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255830, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.330893] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updated VIF entry in instance network info cache for port a08667b0-f29d-4bd2-8394-b73a26086238. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.331460] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [{"id": "a08667b0-f29d-4bd2-8394-b73a26086238", "address": "fa:16:3e:a5:10:28", "network": {"id": "63a7ad86-09da-4985-898c-dda30d7e3d2f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1084327320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d9fac909b8894a1d92f0a9bcd9739d15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa08667b0-f2", "ovs_interfaceid": "a08667b0-f29d-4bd2-8394-b73a26086238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.419177] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f6426-2700-b997-2ff4-1fa41c381d91, 'name': SearchDatastore_Task, 'duration_secs': 0.012121} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.419591] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.420008] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.423556] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.423884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.424094] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.424697] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e824793c-1a09-4d80-9a6a-2c15ac345acf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.437162] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.437162] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.438179] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94cd16ff-28bf-4876-b889-bad93f30fa01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.445549] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 863.445549] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b96817-cc8b-7364-30ba-830314156efb" [ 863.445549] env[61545]: _type = "Task" [ 863.445549] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.458770] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b96817-cc8b-7364-30ba-830314156efb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.566298] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 863.589508] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.390s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.592550] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.880s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.595897] env[61545]: INFO nova.compute.claims [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.615859] env[61545]: INFO nova.compute.manager [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Rebuilding instance [ 863.634230] env[61545]: INFO nova.scheduler.client.report [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted allocations for instance fa08b76f-d64d-46e9-9865-1ab2e9b1d823 [ 863.680179] env[61545]: DEBUG nova.compute.manager [None req-b14db4a6-9348-4233-b422-e4939c0d970d 
tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 863.681912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd407078-c96c-4287-b65e-76bb61eb30d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.821569] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "609ba431-b42b-4b0d-9c16-06e19bee114c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.821900] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.822108] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.822318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.822494] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.824189] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255830, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.828024] env[61545]: INFO nova.compute.manager [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Terminating instance [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Releasing lock "refresh_cache-609ba431-b42b-4b0d-9c16-06e19bee114c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.839028] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Received event network-vif-plugged-3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Acquiring lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.839028] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] No waiting events found dispatching network-vif-plugged-3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.839028] env[61545]: WARNING nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Received unexpected event network-vif-plugged-3f455c93-7dce-4d43-b98a-ed7d25682f00 for instance with vm_state building and task_state spawning. 
[ 863.839028] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Received event network-changed-3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 863.839028] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Refreshing instance network info cache due to event network-changed-3f455c93-7dce-4d43-b98a-ed7d25682f00. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Acquiring lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.839028] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Acquired lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.839028] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Refreshing network info cache for port 3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.958767] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b96817-cc8b-7364-30ba-830314156efb, 'name': SearchDatastore_Task, 'duration_secs': 0.011105} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.959664] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca4c8c82-ba4b-4982-b7b6-2f0a127202d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.966547] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 863.966547] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208868e-8998-9a5a-8597-729d790c3e16" [ 863.966547] env[61545]: _type = "Task" [ 863.966547] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.976394] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208868e-8998-9a5a-8597-729d790c3e16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.094562] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.142764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e81e7a7-1dc2-4e51-942a-02f3f3665511 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "fa08b76f-d64d-46e9-9865-1ab2e9b1d823" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.233s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.322837] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255830, 'name': CreateSnapshot_Task, 'duration_secs': 1.001654} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.323228] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 864.324152] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a41dab-491f-4510-b3c0-066aca1af1d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.334814] env[61545]: DEBUG nova.compute.manager [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 864.335086] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.339103] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc03bf1-aefe-490c-b0ff-8c0940334021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.352908] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.353513] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c798c7e-bc0b-4967-b2e4-5925cf535cc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.363233] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 864.363233] env[61545]: value = "task-4255831" [ 864.363233] env[61545]: _type = "Task" [ 864.363233] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.374032] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.484846] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5208868e-8998-9a5a-8597-729d790c3e16, 'name': SearchDatastore_Task, 'duration_secs': 0.023639} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.485306] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.485801] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ecf98c79-da3d-44be-9c76-c3fccc688235/ecf98c79-da3d-44be-9c76-c3fccc688235.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.486237] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51d2bb3b-6b37-4877-b0bd-52d9f6fde8ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.501046] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 864.501046] env[61545]: value = "task-4255833" [ 864.501046] env[61545]: _type = "Task" [ 864.501046] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.516290] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255833, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.699346] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.699983] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d25553a2-2c9d-4628-b26f-f645cd9f9686 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.710818] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 864.710818] env[61545]: value = "task-4255834" [ 864.710818] env[61545]: _type = "Task" [ 864.710818] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.723958] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255834, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.852029] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 864.852422] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-49d69bdc-54de-4421-b9c5-4238c51d0d42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.864185] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 864.864185] env[61545]: value = "task-4255835" [ 864.864185] env[61545]: _type = "Task" [ 864.864185] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.887688] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255831, 'name': PowerOffVM_Task, 'duration_secs': 0.265879} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.891816] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 864.892014] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 864.892347] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.895408] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-464b3f48-e12a-47e3-8d70-7a59da893f8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.011326] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255833, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.060868] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.061697] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.061697] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Deleting the datastore file [datastore2] 609ba431-b42b-4b0d-9c16-06e19bee114c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.061984] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9648628-4596-4cde-8411-89201db44178 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.072662] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for the task: (returnval){ [ 865.072662] env[61545]: value = "task-4255837" [ 865.072662] env[61545]: _type = "Task" [ 865.072662] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.084163] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.229089] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255834, 'name': PowerOffVM_Task, 'duration_secs': 0.233078} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.229089] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.229089] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.229735] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b39628-d6ba-4f37-a380-f5936a255432 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.243021] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 865.243021] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67947e8c-5bfa-493c-8e41-172972003acc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.272340] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.272340] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.272340] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleting the datastore file [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.272340] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-917c4b46-7207-4220-b793-d4212c4325af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.286382] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 865.286382] env[61545]: value = "task-4255839" [ 865.286382] env[61545]: _type = "Task" [ 865.286382] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.306333] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.385258] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Updated VIF entry in instance network info cache for port 3f455c93-7dce-4d43-b98a-ed7d25682f00. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.385258] env[61545]: DEBUG nova.network.neutron [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Updating instance_info_cache with network_info: [{"id": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "address": "fa:16:3e:37:39:05", "network": {"id": "418e3b5d-5958-4b9f-9e61-7e16e8c77f97", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-333428270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "826b68db604949ea9d17124d5a2ecefb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f455c93-7d", "ovs_interfaceid": "3f455c93-7dce-4d43-b98a-ed7d25682f00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.388951] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.438259] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0034c30b-ae33-498a-ada5-d5de286854db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.447737] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Successfully updated port: fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.452169] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ed97ef-402e-4d9d-aed3-b80a12e5783f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.492648] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94678dc-a29d-4e1c-be9f-18b9d04dbdca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.505415] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923a08ed-8340-488a-b43b-f89612cf0f57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.516833] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255833, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726151} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.526568] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ecf98c79-da3d-44be-9c76-c3fccc688235/ecf98c79-da3d-44be-9c76-c3fccc688235.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.526568] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.527443] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.529355] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-818be84c-e8ac-4e16-a0d8-16788d12f6f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.543439] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 865.543439] env[61545]: value = "task-4255840" [ 865.543439] env[61545]: _type = "Task" [ 865.543439] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.559419] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.589407] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.796020] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284576} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.796283] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.796479] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 865.796696] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.880787] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.890656] env[61545]: DEBUG oslo_concurrency.lockutils [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] Releasing lock "refresh_cache-ecf98c79-da3d-44be-9c76-c3fccc688235" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.890954] env[61545]: DEBUG nova.compute.manager [req-5927ea76-60c9-46ac-b865-8deb4958a52e req-6207f231-5d61-40ec-9da1-f21125a2488f service nova] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Received event network-vif-deleted-f5ad4055-0195-4342-9493-6b942aab9f3e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 865.958684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.958684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.958684] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.058478] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213468} 
completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.062339] env[61545]: ERROR nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [req-4f14dd08-1261-4b00-9281-c37d7b3ed13c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4f14dd08-1261-4b00-9281-c37d7b3ed13c"}]} [ 866.062812] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.067845] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f30e5f2-7195-4817-a212-fc7eb2c2e4a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.101979] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] ecf98c79-da3d-44be-9c76-c3fccc688235/ecf98c79-da3d-44be-9c76-c3fccc688235.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.104153] env[61545]: DEBUG nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 866.107281] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44477197-e7b1-4719-992b-1aadccb8a981 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.125593] env[61545]: DEBUG oslo_vmware.api [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Task: {'id': task-4255837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.540378} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.126378] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 866.126600] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 866.126804] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.126951] env[61545]: INFO nova.compute.manager [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Took 1.79 seconds to destroy the instance on the hypervisor. [ 866.127299] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.127490] env[61545]: DEBUG nova.compute.manager [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 866.127576] env[61545]: DEBUG nova.network.neutron [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.133649] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 866.133649] env[61545]: value = "task-4255841" [ 866.133649] env[61545]: _type = "Task" [ 866.133649] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.148687] env[61545]: DEBUG nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 866.148687] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.151258] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.151440] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.152039] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255841, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.167313] env[61545]: DEBUG nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 866.167530] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 80 to 81 during operation: update_aggregates {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 866.192450] env[61545]: DEBUG nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 866.273366] env[61545]: DEBUG nova.compute.manager [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Received event network-vif-plugged-fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 866.273366] env[61545]: DEBUG oslo_concurrency.lockutils [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] Acquiring lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.273366] env[61545]: DEBUG oslo_concurrency.lockutils [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.273366] env[61545]: DEBUG oslo_concurrency.lockutils [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.273366] env[61545]: DEBUG nova.compute.manager [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] No waiting events found dispatching network-vif-plugged-fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.273366] env[61545]: WARNING nova.compute.manager [req-346662c5-fea8-4ada-a560-f21fc08bdc26 req-890e66ab-9d31-4065-bb0d-bc937a875966 service nova] [instance: 
dad53420-37f1-42ef-b0d3-e35c73b97417] Received unexpected event network-vif-plugged-fc3b9500-79f7-4be8-a298-f3522507a716 for instance with vm_state building and task_state spawning. [ 866.386247] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.526174] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.640831] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Volume attach. Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 866.641075] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838682', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'name': 'volume-ce082a16-643e-421e-a475-8e5507ec6ab0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0554c462-1dc5-4043-94ac-7a3d28ed05e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'serial': 'ce082a16-643e-421e-a475-8e5507ec6ab0'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 866.641968] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514dd22c-9122-4e3c-99cb-3239aed498a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.657276] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255841, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.676950] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ad7427-31e0-4c19-9fae-ae35dc3a460c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.692292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.692513] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.721986] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] volume-ce082a16-643e-421e-a475-8e5507ec6ab0/volume-ce082a16-643e-421e-a475-8e5507ec6ab0.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.727485] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51e41073-a575-4cc4-82f0-477526a75670 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.753959] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Waiting for the task: (returnval){ [ 866.753959] env[61545]: value = "task-4255842" [ 866.753959] env[61545]: _type = "Task" [ 866.753959] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.765669] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255842, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.840853] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 866.841068] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.841232] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 866.841421] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.841569] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 866.841716] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 866.841928] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 866.842289] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 866.842508] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d 
tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 866.842682] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 866.842854] env[61545]: DEBUG nova.virt.hardware [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 866.843746] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a7caab-320d-49e8-b326-09687629843a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.855121] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d344cd2-8c15-48d7-97f8-6be0eb52f201 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.874139] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.880475] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.881630] env[61545]: DEBUG nova.network.neutron [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.886310] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.886852] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-258811d4-06a8-437b-96b9-901a993e10c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.908551] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.910037] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.910037] env[61545]: value = "task-4255843" [ 866.910037] env[61545]: _type = "Task" [ 866.910037] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.923394] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.043126] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bc8b39-8bef-4b22-9a27-002e0121164c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.052499] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222b3748-392b-4879-9762-1af1f476da60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.089379] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad66fab-6e69-4686-bf14-7080920cbdf5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.098582] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a9661b-19ef-44f4-aa9e-f02973485574 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.114556] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.144876] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255841, 'name': ReconfigVM_Task, 'duration_secs': 0.763283} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.148030] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Reconfigured VM instance instance-00000031 to attach disk [datastore2] ecf98c79-da3d-44be-9c76-c3fccc688235/ecf98c79-da3d-44be-9c76-c3fccc688235.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.148030] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d296abd9-e9db-4e68-865c-f7d72b4f434f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.154246] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 867.154246] env[61545]: value = "task-4255844" [ 867.154246] env[61545]: _type = "Task" [ 867.154246] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.166698] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255844, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.188356] env[61545]: DEBUG nova.network.neutron [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.265791] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255842, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.388476] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.389060] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Instance network_info: |[{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.389212] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255835, 'name': CloneVM_Task, 'duration_secs': 2.227408} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.389503] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:1e:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc3b9500-79f7-4be8-a298-f3522507a716', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.397386] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.397659] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Created linked-clone VM from snapshot [ 867.397933] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.398684] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7284cf-b4f0-498f-8a3e-33f7ec429bfe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.401323] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92624793-5735-4a9a-b9ae-f940cf5d110b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.422987] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Uploading image a6f04a5f-3f40-4297-bbe6-7cf21745fa53 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 867.426455] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.426455] env[61545]: value = "task-4255845" [ 867.426455] env[61545]: _type = "Task" [ 867.426455] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.433136] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255843, 'name': CreateVM_Task, 'duration_secs': 0.342107} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.433136] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.433553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.433710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.434039] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.434303] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb6aa83-401c-47c4-9690-462673b714e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.439560] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255845, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.442738] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 867.442738] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278bfb4-3062-b598-8c31-1bec2b13248b" [ 867.442738] env[61545]: _type = "Task" [ 867.442738] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.451462] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 867.451462] env[61545]: value = "vm-838684" [ 867.451462] env[61545]: _type = "VirtualMachine" [ 867.451462] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 867.451748] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1888f97-5b70-4de5-abd0-ef214d53eeed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.456667] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278bfb4-3062-b598-8c31-1bec2b13248b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.462336] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease: (returnval){ [ 867.462336] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5217b500-c392-5553-d276-4bf59d664034" [ 867.462336] env[61545]: _type = "HttpNfcLease" [ 867.462336] env[61545]: } obtained for exporting VM: (result){ [ 867.462336] env[61545]: value = "vm-838684" [ 867.462336] env[61545]: _type = "VirtualMachine" [ 867.462336] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 867.462749] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the lease: (returnval){ [ 867.462749] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5217b500-c392-5553-d276-4bf59d664034" [ 867.462749] env[61545]: _type = "HttpNfcLease" [ 867.462749] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 867.469738] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 867.469738] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5217b500-c392-5553-d276-4bf59d664034" [ 867.469738] env[61545]: _type = "HttpNfcLease" [ 867.469738] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 867.654537] env[61545]: DEBUG nova.scheduler.client.report [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 867.654910] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 81 to 82 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 867.655112] env[61545]: DEBUG nova.compute.provider_tree [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.669547] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255844, 'name': Rename_Task, 'duration_secs': 0.242297} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.669547] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.669683] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9d0ae73-cf3f-4e82-a1e1-21268e7275a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.676952] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 867.676952] env[61545]: value = "task-4255847" [ 867.676952] env[61545]: _type = "Task" [ 867.676952] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.686333] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255847, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.690937] env[61545]: INFO nova.compute.manager [-] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Took 1.56 seconds to deallocate network for instance. [ 867.766285] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255842, 'name': ReconfigVM_Task, 'duration_secs': 0.624677} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.766637] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfigured VM instance instance-0000000b to attach disk [datastore1] volume-ce082a16-643e-421e-a475-8e5507ec6ab0/volume-ce082a16-643e-421e-a475-8e5507ec6ab0.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.771751] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2e58954-8e80-4802-b47d-1f6b02b6c206 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.787843] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Waiting for the task: (returnval){ [ 867.787843] env[61545]: value = "task-4255848" [ 867.787843] env[61545]: _type = "Task" [ 867.787843] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.797304] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255848, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.939229] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255845, 'name': CreateVM_Task, 'duration_secs': 0.353957} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.939528] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.940658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.955389] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278bfb4-3062-b598-8c31-1bec2b13248b, 'name': SearchDatastore_Task, 'duration_secs': 0.024569} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.955690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.955930] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.956191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.956338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.956564] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.956971] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.957297] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.957537] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4649807-9e96-4426-97a8-cdc587289ec9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.959802] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a71a87-792b-43ec-acb9-d455785e2f79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.967925] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 867.967925] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52818d60-0ea0-bf98-2678-abd7ffe712e4" [ 867.967925] env[61545]: _type = "Task" [ 867.967925] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.973126] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.973445] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.976571] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-549a067f-02e4-45dc-b7b2-0c4368a4e1a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.978920] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 867.978920] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5217b500-c392-5553-d276-4bf59d664034" [ 867.978920] env[61545]: _type = "HttpNfcLease" [ 867.978920] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 867.979670] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 867.979670] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5217b500-c392-5553-d276-4bf59d664034" [ 867.979670] env[61545]: _type = "HttpNfcLease" [ 867.979670] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 867.980785] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65153e4f-1316-498a-8f58-e90ac881b7ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.990326] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52818d60-0ea0-bf98-2678-abd7ffe712e4, 'name': SearchDatastore_Task, 'duration_secs': 0.013534} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.990630] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 867.990630] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5230bc1a-1db4-f2bb-2694-269d8702f833" [ 867.990630] env[61545]: _type = "Task" [ 867.990630] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.991482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.991728] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.991913] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.998687] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 867.999059] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 868.060086] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5230bc1a-1db4-f2bb-2694-269d8702f833, 'name': SearchDatastore_Task, 'duration_secs': 0.010478} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.065021] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21fc20b5-1686-4f3a-8837-a9224aec8994 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.070189] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 868.070189] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ba80d-a77f-424f-36a0-979c70b3e7bd" [ 868.070189] env[61545]: _type = "Task" [ 868.070189] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.079675] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ba80d-a77f-424f-36a0-979c70b3e7bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.102959] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-852d57ec-7c17-495a-b79c-bacf52af8f29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.164311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.572s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.165020] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.168452] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.844s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.170432] env[61545]: INFO nova.compute.claims [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.186588] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255847, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.196908] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.299994] env[61545]: DEBUG oslo_vmware.api [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255848, 'name': ReconfigVM_Task, 'duration_secs': 0.16042} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.300409] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838682', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'name': 'volume-ce082a16-643e-421e-a475-8e5507ec6ab0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0554c462-1dc5-4043-94ac-7a3d28ed05e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'serial': 'ce082a16-643e-421e-a475-8e5507ec6ab0'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 868.404608] env[61545]: DEBUG nova.compute.manager [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Received event network-changed-fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 868.404809] env[61545]: DEBUG nova.compute.manager [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Refreshing instance network info cache due to event network-changed-fc3b9500-79f7-4be8-a298-f3522507a716. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 868.405086] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.405257] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.405534] env[61545]: DEBUG nova.network.neutron [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Refreshing network info cache for port fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.584025] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528ba80d-a77f-424f-36a0-979c70b3e7bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.584870] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.585757] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.586279] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.586483] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.586778] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fa08643-822a-487e-9cf8-bd1b8bed3017 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.589695] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08801124-e680-464b-b5d6-ba20aa857c0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.600372] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 868.600372] env[61545]: value = "task-4255849" [ 868.600372] env[61545]: _type = "Task" [ 868.600372] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.605806] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.605806] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.614854] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7963bbfa-7d2c-4533-9dcc-1c77031471a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.628874] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.629462] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 868.629462] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af9125-42ae-e532-2740-1c5a572d9fff" [ 868.629462] env[61545]: _type = "Task" [ 868.629462] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.640683] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af9125-42ae-e532-2740-1c5a572d9fff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.675290] env[61545]: DEBUG nova.compute.utils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 868.681905] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 868.682542] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 868.696885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "d517f427-8580-481b-b50f-150da6c571b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.696885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.696885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "d517f427-8580-481b-b50f-150da6c571b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.697150] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.697276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.701706] env[61545]: DEBUG oslo_vmware.api [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255847, 'name': PowerOnVM_Task, 'duration_secs': 0.536933} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.701706] env[61545]: INFO nova.compute.manager [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Terminating instance [ 868.705517] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.705775] env[61545]: INFO nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Took 9.98 seconds to spawn the instance on the hypervisor. [ 868.706566] env[61545]: DEBUG nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.708063] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d885ec0-285c-4cac-a87c-43f12836d7b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.755750] env[61545]: DEBUG nova.policy [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d5677e0c7a74f8abe4dd364c619b47f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a1542baa59a4be387f3fe1526116d37', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 868.873151] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "5b2fb040-a964-479f-ae3f-4f428248d64b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.873542] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.873856] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] 
Acquiring lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.874138] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.874714] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.879077] env[61545]: INFO nova.compute.manager [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Terminating instance [ 869.114907] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255849, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.141371] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af9125-42ae-e532-2740-1c5a572d9fff, 'name': SearchDatastore_Task, 'duration_secs': 0.021417} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.144817] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69ecb7f9-26c3-4ce9-886e-a0e5d1b64389 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.156624] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 869.156624] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262e698-70cb-d220-9236-5e2bc22259ac" [ 869.156624] env[61545]: _type = "Task" [ 869.156624] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.165592] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262e698-70cb-d220-9236-5e2bc22259ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.182041] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.211391] env[61545]: DEBUG nova.compute.manager [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 869.211391] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.211391] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc3d78d-d7b9-4948-b5ce-a67f72a8550a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.241399] env[61545]: INFO nova.compute.manager [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Took 38.69 seconds to build instance. [ 869.245958] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.246348] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40d5bbc2-ddc5-4029-a6e1-1c80cfac40c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.254605] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 869.254605] env[61545]: value = "task-4255850" [ 869.254605] env[61545]: _type = "Task" [ 869.254605] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.273281] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255850, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.360130] env[61545]: DEBUG nova.objects.instance [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lazy-loading 'flavor' on Instance uuid 0554c462-1dc5-4043-94ac-7a3d28ed05e1 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.385588] env[61545]: DEBUG nova.compute.manager [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 869.385588] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.385588] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9796e82-fbb5-462c-b146-8e4ab513ac2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.394666] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.394666] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdfaced8-db91-4b7f-8393-d56cf1b813a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.408762] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 869.408762] env[61545]: value = "task-4255851" [ 869.408762] env[61545]: _type = "Task" [ 869.408762] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.425810] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.439707] env[61545]: DEBUG nova.network.neutron [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updated VIF entry in instance network info cache for port fc3b9500-79f7-4be8-a298-f3522507a716. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.439991] env[61545]: DEBUG nova.network.neutron [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.464520] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Successfully created port: 8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.617063] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650262} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.617747] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.617747] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.618078] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-daf901ea-79c7-4fed-8279-08d73137a56a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.631131] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 869.631131] env[61545]: value = "task-4255852" [ 869.631131] env[61545]: _type = "Task" [ 869.631131] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.645903] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.672931] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262e698-70cb-d220-9236-5e2bc22259ac, 'name': SearchDatastore_Task, 'duration_secs': 0.063123} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.673381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.673887] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.673887] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bb546ba-e82b-4b87-a2b2-36ca562d8292 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.685963] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 869.685963] env[61545]: value = "task-4255853" [ 869.685963] env[61545]: _type = "Task" [ 869.685963] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.701985] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255853, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.745167] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92e92fa3-e10b-4e85-b30e-f5a8ee2396e6 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.227s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.750479] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.751713] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.766487] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255850, 'name': PowerOffVM_Task, 'duration_secs': 0.311771} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.766487] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.766808] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.767506] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a75984f1-c8b6-46e0-bd40-5f2becef97a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.849978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.851573] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.851573] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleting the datastore file [datastore2] d517f427-8580-481b-b50f-150da6c571b9 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.851573] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c932f993-e04d-4fa6-bae5-8419e78b5ab4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.859875] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 869.859875] env[61545]: value = "task-4255855" [ 869.859875] env[61545]: _type = "Task" [ 869.859875] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.866938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.866938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.866938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.867174] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.867370] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.871837] env[61545]: DEBUG oslo_concurrency.lockutils [None req-57278e5e-5f30-4d7d-b812-3a9732249959 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.905s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.873795] env[61545]: INFO nova.compute.manager [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Terminating instance [ 869.880490] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255855, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.921290] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255851, 'name': PowerOffVM_Task, 'duration_secs': 0.265281} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.924704] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.924704] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.924879] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf49a364-92d0-4427-8669-3af405807be6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.943907] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.944277] env[61545]: DEBUG nova.compute.manager [req-0a46862f-e3ee-4c76-8934-5741567a1be0 req-d1e2b2c9-687e-4961-8576-c7dfe65f94ba service nova] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Received event network-vif-deleted-a08667b0-f29d-4bd2-8394-b73a26086238 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 870.014810] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.014810] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.015199] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleting the datastore file [datastore2] 5b2fb040-a964-479f-ae3f-4f428248d64b {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.015642] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0a33572-d551-41f6-b0e7-653d358adad1 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.028721] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for the task: (returnval){ [ 870.028721] env[61545]: value = "task-4255857" [ 870.028721] env[61545]: _type = "Task" [ 870.028721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.037977] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.090072] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39ff2b8-4b79-494c-9f55-b57b6eba2fc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.097913] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b20c74-c3d7-40cf-b206-0d2064085bb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.131503] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d74e55a-07df-42c0-930b-a7022ddbbb6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.146716] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e2cb4a-dc4a-44c2-ad36-b6decfec0831 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.150848] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105314} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.151164] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.153222] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62deab3e-5785-4f96-9c58-a9277c070d21 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.165480] env[61545]: DEBUG nova.compute.provider_tree [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.189547] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.190304] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8da249fd-58aa-4aa6-b472-e1d37ffa034c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.211721] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.221866] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255853, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.227028] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 870.227028] env[61545]: value = "task-4255858" [ 870.227028] env[61545]: _type = "Task" [ 870.227028] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.240605] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255858, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.247558] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.253020] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.253200] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.253284] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 
tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.255055] env[61545]: DEBUG nova.virt.hardware [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.262487] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7281eb-0b9a-4931-a57e-530ffe9345f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.268154] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 870.270833] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.274340] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52d213e-ee0f-456d-9cea-06290fa04248 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.373972] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.383591] env[61545]: DEBUG nova.compute.manager [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 870.385536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.385536] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb4aa3b-cacc-4bf3-9ae0-8ada72926999 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.395927] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.396557] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-082cf684-4275-47b0-8d2a-bc8e478e0adb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.406641] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 870.406641] env[61545]: value = "task-4255859" [ 870.406641] env[61545]: _type = "Task" [ 870.406641] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.417729] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.543752] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.709383] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255853, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667581} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.710368] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.710863] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.711356] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42a2576d-d41d-4c1e-a8f9-5d2e35d2a0d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.723013] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 870.723013] env[61545]: value = "task-4255860" [ 870.723013] env[61545]: _type = "Task" [ 870.723013] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.724553] env[61545]: DEBUG nova.scheduler.client.report [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 870.724960] env[61545]: DEBUG nova.compute.provider_tree [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 82 to 83 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 870.725402] env[61545]: DEBUG nova.compute.provider_tree [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.743621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.743621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.752830] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255860, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.756337] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255858, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.774905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.798025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.880705] env[61545]: DEBUG oslo_vmware.api [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.567881} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.880999] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.881262] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.884883] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.884883] env[61545]: INFO nova.compute.manager [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: d517f427-8580-481b-b50f-150da6c571b9] Took 1.67 seconds to destroy the instance on the hypervisor. [ 870.884883] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.884883] env[61545]: DEBUG nova.compute.manager [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 870.884883] env[61545]: DEBUG nova.network.neutron [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.919127] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255859, 'name': PowerOffVM_Task, 'duration_secs': 0.308434} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.919127] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.919127] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.919463] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6d9a2d4-8eda-482c-a2f9-97c57d55b60d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.994966] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.995316] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.995560] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleting the datastore file [datastore2] 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.995895] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7cf9189-0377-4df1-b519-f8a9807aa1b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.004510] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: 
(returnval){ [ 871.004510] env[61545]: value = "task-4255862" [ 871.004510] env[61545]: _type = "Task" [ 871.004510] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.016116] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255862, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.040710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "interface-ecf98c79-da3d-44be-9c76-c3fccc688235-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.040938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "interface-ecf98c79-da3d-44be-9c76-c3fccc688235-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.041132] env[61545]: DEBUG nova.objects.instance [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lazy-loading 'flavor' on Instance uuid ecf98c79-da3d-44be-9c76-c3fccc688235 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.042888] env[61545]: DEBUG oslo_vmware.api [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Task: {'id': task-4255857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.533684} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.047045] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.047045] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.047045] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.047045] env[61545]: INFO nova.compute.manager [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Took 1.66 seconds to destroy the instance on the hypervisor. [ 871.047045] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.047045] env[61545]: DEBUG nova.compute.manager [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.047045] env[61545]: DEBUG nova.network.neutron [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.246468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.078s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.247162] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 871.250282] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255858, 'name': ReconfigVM_Task, 'duration_secs': 0.53996} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.254923] env[61545]: INFO nova.compute.manager [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Detaching volume ce082a16-643e-421e-a475-8e5507ec6ab0 [ 871.257401] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.502s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.259526] env[61545]: INFO nova.compute.claims [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.262509] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0/602bd42d-6afa-4419-8352-73a9daab2fe0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.263245] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100997} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.263982] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2036de3e-12c7-49f2-bfc0-31b53dcba598 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.268716] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.268716] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41adeaab-537f-4e5c-a8f7-6f6c457375f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.312648] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.312648] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 871.312648] env[61545]: value = "task-4255863" [ 871.312648] env[61545]: _type = "Task" [ 871.312648] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.312648] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26b4531d-de54-4dd7-93ab-5dbdfd4061a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.335079] env[61545]: INFO nova.virt.block_device [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Attempting to driver detach volume ce082a16-643e-421e-a475-8e5507ec6ab0 from mountpoint /dev/sdb [ 871.335328] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 871.335555] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838682', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'name': 'volume-ce082a16-643e-421e-a475-8e5507ec6ab0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0554c462-1dc5-4043-94ac-7a3d28ed05e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'serial': 'ce082a16-643e-421e-a475-8e5507ec6ab0'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 871.337188] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c69b87-fc17-4504-8be0-1edb0506a121 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.343573] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 871.343573] env[61545]: value = "task-4255864" [ 871.343573] env[61545]: _type = "Task" [ 871.343573] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.371816] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255863, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.373210] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf567ad-eb29-4a54-9690-47dd721a8276 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.380262] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255864, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.385958] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfdb989-5e3b-411e-a5e3-c31f96ced6b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.409198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5cb64d-4d50-4fd3-9705-79a5f33ea6af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.428358] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] The volume has not been displaced from its original location: [datastore1] volume-ce082a16-643e-421e-a475-8e5507ec6ab0/volume-ce082a16-643e-421e-a475-8e5507ec6ab0.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 871.431518] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfiguring VM instance instance-0000000b to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 871.431859] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96aa039b-c924-4622-918b-48b701de5e8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.452282] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Waiting for the task: (returnval){ [ 871.452282] env[61545]: value = "task-4255865" [ 871.452282] env[61545]: _type = "Task" [ 871.452282] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.461873] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255865, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.517059] env[61545]: DEBUG oslo_vmware.api [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27286} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.517348] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.517572] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.517820] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.517985] env[61545]: INFO nova.compute.manager [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 871.518287] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.520275] env[61545]: DEBUG nova.compute.manager [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.520275] env[61545]: DEBUG nova.network.neutron [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.547318] env[61545]: DEBUG nova.objects.instance [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lazy-loading 'pci_requests' on Instance uuid ecf98c79-da3d-44be-9c76-c3fccc688235 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.752511] env[61545]: DEBUG nova.compute.utils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.754591] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.755038] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.814998] env[61545]: DEBUG nova.policy [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cdf64fe2fad47a582da369ec3c378ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd3ea97039a04935931355f1b8c10ed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.841532] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255863, 'name': Rename_Task, 'duration_secs': 0.231917} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.841841] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.842120] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a2eb6ab-0c65-4aa0-8803-60aa8377f7c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.849800] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 871.849800] env[61545]: value = "task-4255866" [ 871.849800] env[61545]: _type = "Task" [ 871.849800] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.854346] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255864, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.867259] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255866, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.930535] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Successfully updated port: 8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.967035] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255865, 'name': ReconfigVM_Task, 'duration_secs': 0.359206} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.967035] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Reconfigured VM instance instance-0000000b to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 871.971465] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b907d973-7c47-4a3d-aff2-860dfff78b4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.989036] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Waiting for the task: (returnval){ [ 871.989036] env[61545]: value = "task-4255867" [ 871.989036] env[61545]: _type = "Task" [ 871.989036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.000394] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255867, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.026598] env[61545]: DEBUG nova.network.neutron [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.052694] env[61545]: DEBUG nova.objects.base [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 872.052694] env[61545]: DEBUG nova.network.neutron [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.169376] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18d2f2c9-b369-4d22-a626-02be14a79a59 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "interface-ecf98c79-da3d-44be-9c76-c3fccc688235-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.128s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.205077] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Successfully created port: 52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.259355] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 872.278033] env[61545]: DEBUG nova.network.neutron [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.354372] env[61545]: DEBUG nova.compute.manager [req-6f1d80fc-d5db-4359-abec-f3da1c325daa req-17be1b98-b20e-4289-aad2-49b306cc3fab service nova] [instance: d517f427-8580-481b-b50f-150da6c571b9] Received event network-vif-deleted-70ed8a3b-0e27-462d-84c6-05185727c589 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 872.365551] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255864, 'name': ReconfigVM_Task, 'duration_secs': 0.527593} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.369714] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfigured VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.371155] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec4ffc41-9d42-4e9d-ba37-853da62408f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.377496] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255866, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.383617] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 872.383617] env[61545]: value = "task-4255868" [ 872.383617] env[61545]: _type = "Task" [ 872.383617] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.400023] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255868, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.434091] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.434091] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.434091] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.504571] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255867, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.531053] env[61545]: INFO nova.compute.manager [-] [instance: d517f427-8580-481b-b50f-150da6c571b9] Took 1.65 seconds to deallocate network for instance. [ 872.538502] env[61545]: DEBUG nova.network.neutron [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.779330] env[61545]: INFO nova.compute.manager [-] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Took 1.73 seconds to deallocate network for instance. [ 872.866043] env[61545]: DEBUG oslo_vmware.api [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255866, 'name': PowerOnVM_Task, 'duration_secs': 0.561825} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.869295] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.869508] env[61545]: DEBUG nova.compute.manager [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.871197] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0e7d79-6140-46ad-b83e-f58a1962a90f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.894693] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255868, 'name': Rename_Task, 'duration_secs': 0.259109} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.894958] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.897850] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd0b9c9f-f8d6-4d95-bbb8-cf123f8adf98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.906817] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 872.906817] env[61545]: value = "task-4255869" [ 872.906817] env[61545]: _type = "Task" [ 872.906817] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.921311] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255869, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.957046] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fd0084-0106-4875-a29c-2316bcb06532 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.965719] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d647d31-da40-4acf-8efa-5f56e36b34d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.973327] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.006111] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3fc3bd-49b8-4f11-bac2-fb38244137fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.018640] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255867, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.018640] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299078af-20c5-4925-b892-2c85d6570d5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.034849] env[61545]: DEBUG nova.compute.provider_tree [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.039203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.041287] env[61545]: INFO nova.compute.manager [-] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Took 1.52 seconds to deallocate network for instance. 
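The records above repeat one motif: a VirtualMachine *_Task method is invoked through oslo.vmware, wait_for_task (oslo_vmware/api.py:397) blocks while _poll_task reports "progress is N%" (api.py:434), and the final poll logs 'duration_secs' once the task completes (api.py:444). The sketch below is a minimal illustration of that invoke-then-wait pattern, not Nova's own code; it assumes an already-established oslo_vmware.api.VMwareAPISession (one is created earlier in this log) and a VirtualMachine managed-object reference, and the helper name power_on_and_wait is purely illustrative.

```python
# Minimal sketch (not Nova code) of the pattern behind the
# "Invoking VirtualMachine.PowerOnVM_Task ..." / "progress is N%" /
# "completed successfully" lines in this log.
#
# Assumptions: `session` is an established oslo_vmware.api.VMwareAPISession
# and `vm_ref` is a VirtualMachine managed-object reference.

def power_on_and_wait(session, vm_ref):
    # invoke_api issues the SOAP call; *_Task methods return a Task
    # managed-object reference immediately (logged here as identifiers
    # such as "task-4255866").
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task then polls the task object, emitting the
    # "_poll_task ... progress is N%" debug lines, and returns the final
    # task info once the task reaches the 'success' state; an 'error'
    # state is surfaced to the caller as an oslo.vmware exception.
    return session.wait_for_task(task_ref)
```

Because each vSphere task is asynchronous, the compute service interleaves many such polls, which is why unrelated tasks (CopyVirtualDisk_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) appear interleaved across the timestamps in this section.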
[ 873.207845] env[61545]: DEBUG nova.network.neutron [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updating instance_info_cache with network_info: [{"id": "8564beef-15a1-4c85-b090-c45df7394356", "address": "fa:16:3e:22:66:bd", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8564beef-15", "ovs_interfaceid": "8564beef-15a1-4c85-b090-c45df7394356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.273264] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 873.288357] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.297107] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 873.297428] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.297666] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 873.297881] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.298104] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 873.298931] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 873.298931] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 873.298931] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 873.298931] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 873.299203] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 873.299327] env[61545]: DEBUG nova.virt.hardware [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 873.300232] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e4f6c8-bd88-4bd7-934f-8ebda9bde634 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.309710] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3e3cc5-0462-49b2-ac87-30d236495d6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.394733] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.418684] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255869, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.514259] env[61545]: DEBUG oslo_vmware.api [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Task: {'id': task-4255867, 'name': ReconfigVM_Task, 'duration_secs': 1.214081} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.514888] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838682', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'name': 'volume-ce082a16-643e-421e-a475-8e5507ec6ab0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0554c462-1dc5-4043-94ac-7a3d28ed05e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce082a16-643e-421e-a475-8e5507ec6ab0', 'serial': 'ce082a16-643e-421e-a475-8e5507ec6ab0'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 873.542073] env[61545]: DEBUG nova.scheduler.client.report [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.548547] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.713462] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.713462] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Instance network_info: |[{"id": "8564beef-15a1-4c85-b090-c45df7394356", "address": "fa:16:3e:22:66:bd", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8564beef-15", "ovs_interfaceid": "8564beef-15a1-4c85-b090-c45df7394356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.713462] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:66:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8564beef-15a1-4c85-b090-c45df7394356', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.720439] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 873.720701] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.720980] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3aa7de5-492a-4419-92ca-8aa111c417b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.753875] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.753875] env[61545]: value = "task-4255870" [ 873.753875] env[61545]: _type = "Task" [ 873.753875] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.765465] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255870, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.873348] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Successfully updated port: 52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.923364] env[61545]: DEBUG oslo_vmware.api [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255869, 'name': PowerOnVM_Task, 'duration_secs': 0.904668} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.923656] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.923883] env[61545]: INFO nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Took 10.70 seconds to spawn the instance on the hypervisor. [ 873.924246] env[61545]: DEBUG nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.925086] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac1ee90-ae18-4b7b-9a34-bf17e70ae907 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.993411] env[61545]: DEBUG nova.compute.manager [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Received event network-vif-plugged-52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 873.993624] env[61545]: DEBUG oslo_concurrency.lockutils [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] Acquiring lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.993878] env[61545]: DEBUG oslo_concurrency.lockutils [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.994065] env[61545]: DEBUG oslo_concurrency.lockutils [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.994239] env[61545]: DEBUG nova.compute.manager [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] No waiting events found dispatching network-vif-plugged-52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 873.994405] env[61545]: WARNING nova.compute.manager [req-92d05691-0beb-469f-95e3-6e07285ba395 req-ef2885a5-5a12-4a7a-8fcc-e9249bdf3ae0 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Received unexpected event 
network-vif-plugged-52cec3f8-5316-4f38-86e3-82087b8e5fac for instance with vm_state building and task_state spawning. [ 874.048355] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.791s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.048987] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 874.051814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.250s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.053330] env[61545]: INFO nova.compute.claims [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.102926] env[61545]: DEBUG nova.objects.instance [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lazy-loading 'flavor' on Instance uuid 0554c462-1dc5-4043-94ac-7a3d28ed05e1 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.265160] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255870, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.377292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.377292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.377514] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.418282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "ecf98c79-da3d-44be-9c76-c3fccc688235" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.419032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.419032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.419373] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.419557] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.421618] env[61545]: INFO nova.compute.manager [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Terminating instance [ 874.446650] env[61545]: INFO nova.compute.manager [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Took 40.23 seconds to build instance. [ 874.467473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "602bd42d-6afa-4419-8352-73a9daab2fe0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.467744] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.467959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "602bd42d-6afa-4419-8352-73a9daab2fe0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.468159] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.468371] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.470326] env[61545]: INFO nova.compute.manager [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Terminating instance [ 874.476977] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Received event network-vif-plugged-8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 874.477437] env[61545]: 
DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Acquiring lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.477437] env[61545]: DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.477559] env[61545]: DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.477707] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] No waiting events found dispatching network-vif-plugged-8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.477881] env[61545]: WARNING nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Received unexpected event network-vif-plugged-8564beef-15a1-4c85-b090-c45df7394356 for instance with vm_state building and task_state spawning. [ 874.478095] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Received event network-vif-deleted-ca54cc50-d211-4ada-8e47-1747a860fa83 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 874.478224] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Received event network-changed-8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 874.478379] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Refreshing instance network info cache due to event network-changed-8564beef-15a1-4c85-b090-c45df7394356. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 874.478566] env[61545]: DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Acquiring lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.478702] env[61545]: DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Acquired lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.478888] env[61545]: DEBUG nova.network.neutron [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Refreshing network info cache for port 8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.558672] env[61545]: DEBUG nova.compute.utils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 874.560389] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 874.560612] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.612736] env[61545]: DEBUG nova.policy [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cdf64fe2fad47a582da369ec3c378ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd3ea97039a04935931355f1b8c10ed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 874.765017] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255870, 'name': CreateVM_Task, 'duration_secs': 0.563969} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.765186] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.765890] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.766074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.766439] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.766879] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8581ab0-82af-4afb-a0f8-b759b69806f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.773121] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 874.773121] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b64856-9a6e-38a8-dd98-f6bc8b660e7b" [ 874.773121] env[61545]: _type = "Task" [ 874.773121] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.782633] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b64856-9a6e-38a8-dd98-f6bc8b660e7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.910247] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Successfully created port: 14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.929144] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.931840] env[61545]: DEBUG nova.compute.manager [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.931941] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.934036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7cb06a-7d0b-4a70-ab07-ceb529d76416 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.941657] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.941952] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db8639a4-281f-41a4-958c-c3b85627573d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.949296] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1370f1f1-fa12-45ee-bf0f-9bcc022173e6 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.426s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.952123] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 874.952123] env[61545]: value = "task-4255872" [ 874.952123] env[61545]: _type = "Task" [ 874.952123] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.971613] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255872, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.974695] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "refresh_cache-602bd42d-6afa-4419-8352-73a9daab2fe0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.974779] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "refresh_cache-602bd42d-6afa-4419-8352-73a9daab2fe0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.975676] env[61545]: DEBUG nova.network.neutron [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.066707] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 875.115484] env[61545]: DEBUG oslo_concurrency.lockutils [None req-255b4580-a572-4e28-91b9-0cce8f47d8a8 tempest-VolumesAssistedSnapshotsTest-1790119817 tempest-VolumesAssistedSnapshotsTest-1790119817-project-admin] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.372s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.289829] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b64856-9a6e-38a8-dd98-f6bc8b660e7b, 'name': SearchDatastore_Task, 'duration_secs': 0.021514} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.290635] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.290981] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.291429] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.291824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.292159] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.292523] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bdaf667-15bc-4d46-adb0-b853d9b864e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.307934] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.308302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.309410] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be33acec-5b03-42f1-b83b-9e259f7a65b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.323393] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 875.323393] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5211a-f598-e493-e039-1430084c868c" [ 875.323393] env[61545]: _type = "Task" [ 875.323393] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.333907] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5211a-f598-e493-e039-1430084c868c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.463924] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.467518] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255872, 'name': PowerOffVM_Task, 'duration_secs': 0.255159} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.471489] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.471489] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.472138] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2687f77c-c696-4b64-a754-66f8e99f17a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.544356] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.544652] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.544857] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Deleting the datastore file [datastore2] ecf98c79-da3d-44be-9c76-c3fccc688235 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.545180] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac93d418-7ef7-4f7d-8ce8-f0bf7136047d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.554476] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for the task: (returnval){ [ 875.554476] env[61545]: value = "task-4255874" [ 875.554476] env[61545]: _type = "Task" [ 875.554476] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.566256] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255874, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.607891] env[61545]: DEBUG nova.network.neutron [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.669389] env[61545]: DEBUG nova.network.neutron [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Updating instance_info_cache with network_info: [{"id": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "address": "fa:16:3e:07:8c:49", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cec3f8-53", "ovs_interfaceid": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.765722] env[61545]: DEBUG nova.network.neutron [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.846396] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5211a-f598-e493-e039-1430084c868c, 'name': SearchDatastore_Task, 'duration_secs': 0.01381} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.850783] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3eaff26-6670-43a4-ab6b-3e6fc251f522 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.857917] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 875.857917] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52551567-5272-dbf4-bf69-57489ece6cd4" [ 875.857917] env[61545]: _type = "Task" [ 875.857917] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.863516] env[61545]: DEBUG nova.network.neutron [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updated VIF entry in instance network info cache for port 8564beef-15a1-4c85-b090-c45df7394356. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.864129] env[61545]: DEBUG nova.network.neutron [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updating instance_info_cache with network_info: [{"id": "8564beef-15a1-4c85-b090-c45df7394356", "address": "fa:16:3e:22:66:bd", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8564beef-15", "ovs_interfaceid": "8564beef-15a1-4c85-b090-c45df7394356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.867389] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a566b22-ac34-4308-9430-c27e9b2c87e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.874242] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52551567-5272-dbf4-bf69-57489ece6cd4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.881231] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a188962b-251a-4ad1-95a2-e36f752fed52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.918533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63375d3-afce-4cdf-ac5a-fd63fd1ab4e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.927497] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a59546a-95f9-4014-836a-3fb9f28e75e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.943725] env[61545]: DEBUG nova.compute.provider_tree [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.990229] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.070914] env[61545]: DEBUG oslo_vmware.api [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Task: {'id': task-4255874, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241059} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.070914] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.070914] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.070914] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.071277] env[61545]: INFO nova.compute.manager [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 876.071277] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.071457] env[61545]: DEBUG nova.compute.manager [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.071555] env[61545]: DEBUG nova.network.neutron [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.081434] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 876.108262] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 876.108498] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.108656] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.108850] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.108999] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.109279] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 876.109521] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 876.109727] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 876.113031] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 876.113031] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 876.113031] env[61545]: DEBUG nova.virt.hardware [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 876.113031] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba9a7fc-c175-4bc2-b608-38326e86d3d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.126336] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad205dd-012e-43f7-8bd4-1c4e25e063ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.175173] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.175689] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Instance network_info: |[{"id": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "address": "fa:16:3e:07:8c:49", "network": {"id": 
"52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cec3f8-53", "ovs_interfaceid": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 876.178735] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:8c:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52cec3f8-5316-4f38-86e3-82087b8e5fac', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.184703] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Creating folder: Project (cd3ea97039a04935931355f1b8c10ed7). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.185131] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8a053fd-3e2c-4b51-902d-3e4e0c5a7ed7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.200068] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Created folder: Project (cd3ea97039a04935931355f1b8c10ed7) in parent group-v838542. [ 876.200165] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Creating folder: Instances. Parent ref: group-v838688. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.200770] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ca258bd-4670-42b3-b23b-f01c647dfc95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.213324] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Created folder: Instances in parent group-v838688. [ 876.213324] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.213324] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.213614] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-460bce3d-1f9b-49bd-9f52-286ed5a3c78e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.236275] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.236275] env[61545]: value = "task-4255877" [ 876.236275] env[61545]: _type = "Task" [ 876.236275] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.249307] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255877, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.273099] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "refresh_cache-602bd42d-6afa-4419-8352-73a9daab2fe0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.273160] env[61545]: DEBUG nova.compute.manager [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 876.275265] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.275265] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1f7e8b-a4ee-4e3e-93f2-b5b771192034 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.283471] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.283809] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12cbfeaf-9e22-4be7-890f-bdcf142eb4bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.291635] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 876.291635] env[61545]: value = "task-4255878" [ 876.291635] env[61545]: _type = "Task" [ 876.291635] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.304253] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.379076] env[61545]: DEBUG oslo_concurrency.lockutils [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] Releasing lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.379076] env[61545]: DEBUG nova.compute.manager [req-54315e94-3d50-4cd3-b05d-3cacc2581988 req-b498a490-9b50-4eb9-aa23-e84c95bd73ad service nova] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Received event network-vif-deleted-3ecc6a7f-17b3-4e11-92bd-19f00ab1364e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 876.379076] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52551567-5272-dbf4-bf69-57489ece6cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.015484} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.379580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.380016] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/e3742aa7-0b26-41f5-b8c0-9388ef2b7e74.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.381131] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbed11ed-6fb2-4959-a409-d559615fa255 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.396439] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 876.396439] env[61545]: value = "task-4255879" [ 876.396439] env[61545]: _type = "Task" [ 876.396439] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.408633] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255879, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.455430] env[61545]: DEBUG nova.scheduler.client.report [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.491224] env[61545]: DEBUG nova.compute.manager [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Received event network-changed-52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 876.491224] env[61545]: DEBUG nova.compute.manager [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Refreshing instance network info cache due to event network-changed-52cec3f8-5316-4f38-86e3-82087b8e5fac. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 876.491571] env[61545]: DEBUG oslo_concurrency.lockutils [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] Acquiring lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.491641] env[61545]: DEBUG oslo_concurrency.lockutils [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] Acquired lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.491814] env[61545]: DEBUG nova.network.neutron [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Refreshing network info cache for port 52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.653636] env[61545]: DEBUG nova.compute.manager [req-10b6e2e0-c44d-47e0-b20e-45b171797aae req-510213dd-1572-4ad6-8bde-d2a6f3e850dd service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Received event network-vif-deleted-3f455c93-7dce-4d43-b98a-ed7d25682f00 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 876.653869] env[61545]: INFO nova.compute.manager [req-10b6e2e0-c44d-47e0-b20e-45b171797aae req-510213dd-1572-4ad6-8bde-d2a6f3e850dd service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Neutron deleted interface 3f455c93-7dce-4d43-b98a-ed7d25682f00; detaching it from the instance and deleting it from the info cache [ 876.654598] env[61545]: DEBUG nova.network.neutron [req-10b6e2e0-c44d-47e0-b20e-45b171797aae req-510213dd-1572-4ad6-8bde-d2a6f3e850dd service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Updating instance_info_cache with network_info: [] 
{{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.750159] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255877, 'name': CreateVM_Task, 'duration_secs': 0.499852} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.750425] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.751311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.751529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.752164] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.752297] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0872f37a-c5f5-4df2-956a-d896bd097192 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.760577] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 876.760577] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256335e-b7a8-c03d-8e51-b2f28d8413ff" [ 876.760577] env[61545]: _type = "Task" [ 876.760577] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.775567] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256335e-b7a8-c03d-8e51-b2f28d8413ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.784947] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Successfully updated port: 14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.804750] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255878, 'name': PowerOffVM_Task, 'duration_secs': 0.190203} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.805427] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.805598] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.805884] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-067e8825-16a2-4622-8274-fd984d85a7bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.839265] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.839656] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.839837] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleting the datastore file [datastore2] 602bd42d-6afa-4419-8352-73a9daab2fe0 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.840208] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-171449cc-c2da-4431-8c88-5f4808a3c99c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.848101] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 876.848101] env[61545]: value = "task-4255881" [ 876.848101] env[61545]: _type = "Task" [ 876.848101] env[61545]: } to 
complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.858744] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.871756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.871756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.871756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.871756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.871756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.872667] env[61545]: INFO nova.compute.manager [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Terminating instance [ 876.909826] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255879, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.964674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.913s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.965416] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 876.969300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.987s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.970983] env[61545]: DEBUG nova.objects.instance [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lazy-loading 'resources' on Instance uuid 6f2a4514-4de9-427d-91be-f445235696bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.006053] env[61545]: DEBUG nova.network.neutron [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.160459] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-494592a5-df27-4402-808f-e89e3bfb09fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.174986] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d78afa6-f648-4813-a6f1-6cf0a5ca1a61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.219898] env[61545]: DEBUG nova.compute.manager [req-10b6e2e0-c44d-47e0-b20e-45b171797aae req-510213dd-1572-4ad6-8bde-d2a6f3e850dd service nova] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Detach interface failed, port_id=3f455c93-7dce-4d43-b98a-ed7d25682f00, reason: Instance ecf98c79-da3d-44be-9c76-c3fccc688235 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 877.274599] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256335e-b7a8-c03d-8e51-b2f28d8413ff, 'name': SearchDatastore_Task, 'duration_secs': 0.064794} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.275297] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.275415] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.275611] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.275760] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.275979] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.276299] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccb0706d-4d00-4855-b2b6-5efb51656073 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.287550] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.287719] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.287875] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Building network info cache for instance {{(pid=61545) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 877.289251] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.289491] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.290335] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f301a6f8-c228-4d4b-b29b-14fad0e74cad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.297904] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 877.297904] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b653ea-3a4f-27d1-9bce-f7d58ae06b45" [ 877.297904] env[61545]: _type = "Task" [ 877.297904] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.310761] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b653ea-3a4f-27d1-9bce-f7d58ae06b45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.325449] env[61545]: DEBUG nova.network.neutron [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Updated VIF entry in instance network info cache for port 52cec3f8-5316-4f38-86e3-82087b8e5fac. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.326157] env[61545]: DEBUG nova.network.neutron [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Updating instance_info_cache with network_info: [{"id": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "address": "fa:16:3e:07:8c:49", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cec3f8-53", "ovs_interfaceid": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.366467] env[61545]: DEBUG oslo_vmware.api [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260715} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.366467] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.366954] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.367244] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.367450] env[61545]: INFO nova.compute.manager [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 877.367675] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.367901] env[61545]: DEBUG nova.compute.manager [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.368009] env[61545]: DEBUG nova.network.neutron [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.379147] env[61545]: DEBUG nova.compute.manager [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.379387] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.380302] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3060c53-786c-4e01-a97e-e77dede6b9fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.388965] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.390180] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8304731d-7083-47ee-991e-111df10c2df9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.392490] env[61545]: DEBUG nova.network.neutron [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.400452] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 877.400452] env[61545]: value = "task-4255882" [ 877.400452] env[61545]: _type = "Task" [ 877.400452] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.414227] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607059} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.419515] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/e3742aa7-0b26-41f5-b8c0-9388ef2b7e74.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.419753] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.420633] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.420899] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a71da49-75f3-43ef-9c40-f7633c705bd7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.430359] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 877.430359] env[61545]: value = "task-4255883" [ 877.430359] env[61545]: _type = "Task" [ 877.430359] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.444270] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255883, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.473642] env[61545]: DEBUG nova.compute.utils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 877.474705] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 877.474705] env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 877.514035] env[61545]: INFO nova.compute.manager [-] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Took 1.44 seconds to deallocate network for instance. [ 877.547355] env[61545]: DEBUG nova.policy [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a70fb8ea2d498688688f7e51cf4bac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45efa52890714522b3058b7144b42a89', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 877.747735] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.748751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.826206] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b653ea-3a4f-27d1-9bce-f7d58ae06b45, 'name': SearchDatastore_Task, 'duration_secs': 0.017233} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.827267] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46b170e1-f3d5-4c2a-af7b-8d0756df98d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.830244] env[61545]: DEBUG oslo_concurrency.lockutils [req-cfa135da-c902-41c6-bee9-d7cc7cfc0262 req-0d503617-8e58-45e5-b00e-b2ca4bd5d907 service nova] Releasing lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.835781] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 877.835781] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259cfe8-9e4d-c454-906c-98ff6e019506" [ 877.835781] env[61545]: _type = "Task" [ 877.835781] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.845243] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259cfe8-9e4d-c454-906c-98ff6e019506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.873036] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.895506] env[61545]: DEBUG nova.network.neutron [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.915706] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255882, 'name': PowerOffVM_Task, 'duration_secs': 0.286654} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.918502] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.918734] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.919365] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30035907-5133-4fb2-bcb1-1fdc8e320b47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.942071] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255883, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090048} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.942536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.943450] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee62cf7-34b4-4461-bf1d-21556e32ccd1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.972505] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/e3742aa7-0b26-41f5-b8c0-9388ef2b7e74.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.972884] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e6f669d-cd0d-4e55-aa5a-2341b8be5e45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.988076] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 878.002559] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 878.002559] env[61545]: value = "task-4255885" [ 878.002559] env[61545]: _type = "Task" [ 878.002559] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.017933] env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Successfully created port: d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.021252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.021363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.021527] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleting the datastore file [datastore2] e8c954ec-de76-4d3e-9a63-6c30523d5b63 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.023780] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e7f7e07-e6fc-4857-ba67-c07052dc3e66 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.034309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.034641] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.043338] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 878.043338] env[61545]: value = "task-4255886" [ 878.043338] env[61545]: _type = "Task" [ 878.043338] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.058961] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255886, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.089595] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 878.091039] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9e7752-998c-4565-b2eb-6a9286e1c054 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.097870] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 878.098218] env[61545]: ERROR oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk due to incomplete transfer. 
[ 878.098304] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c01f627b-228f-42c6-a85d-a07b1bbcd0ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.109033] env[61545]: DEBUG nova.network.neutron [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Updating instance_info_cache with network_info: [{"id": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "address": "fa:16:3e:dc:09:46", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14efafb4-97", "ovs_interfaceid": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.111399] env[61545]: DEBUG oslo_vmware.rw_handles [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe8699-5953-bb89-c38a-4b46005666ef/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 878.111399] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Uploaded image a6f04a5f-3f40-4297-bbe6-7cf21745fa53 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 878.113512] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 878.114208] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-64e0c330-a3cf-4cdf-b59a-6ae86e82e236 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.126205] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 878.126205] env[61545]: value = "task-4255887" [ 878.126205] env[61545]: _type = "Task" [ 878.126205] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.137893] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255887, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.164626] env[61545]: DEBUG nova.compute.manager [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 878.347119] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259cfe8-9e4d-c454-906c-98ff6e019506, 'name': SearchDatastore_Task, 'duration_secs': 0.014999} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.347349] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.347622] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 62301196-fb8a-45fe-9193-0ad8f7126ab5/62301196-fb8a-45fe-9193-0ad8f7126ab5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.350981] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1568db35-9211-4dea-8119-ba6d3ab52ed4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.359854] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 878.359854] env[61545]: value = "task-4255888" [ 878.359854] env[61545]: _type = "Task" [ 878.359854] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.372022] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.372022] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1460cb-9d7a-4ed0-900e-c9335c5f3ecf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.379085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf89b61c-ab61-4bfd-a288-450ef0cb62f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.412194] env[61545]: INFO nova.compute.manager [-] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Took 1.04 seconds to deallocate network for instance. 
[ 878.418019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64322feb-16b2-4b63-8993-368a345ce68c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.429687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdd2484-c615-4486-a50e-fad3466b298b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.447135] env[61545]: DEBUG nova.compute.provider_tree [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.516413] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.556488] env[61545]: DEBUG oslo_vmware.api [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4255886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212989} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.558110] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.558110] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.558110] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.558110] env[61545]: INFO nova.compute.manager [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Took 1.18 seconds to destroy the instance on the hypervisor. [ 878.558354] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.558607] env[61545]: DEBUG nova.compute.manager [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 878.558731] env[61545]: DEBUG nova.network.neutron [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.615604] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.615948] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Instance network_info: |[{"id": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "address": "fa:16:3e:dc:09:46", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14efafb4-97", "ovs_interfaceid": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 878.616426] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:09:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14efafb4-97cd-4720-a2dd-36f9af0a8644', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.625058] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.625320] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.625599] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcf06809-a7cb-4cc5-9eb6-7aaa3853a8c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.653211] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255887, 'name': Destroy_Task, 'duration_secs': 0.394786} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.653532] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Destroyed the VM [ 878.653791] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 878.654787] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fc911661-71c9-42bd-b4e0-91ba2ddd8257 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.657949] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.657949] env[61545]: value = "task-4255889" [ 878.657949] env[61545]: _type = "Task" [ 878.657949] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.662676] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.662676] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.663218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.663218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.663218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "0554c462-1dc5-4043-94ac-7a3d28ed05e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.670449] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255889, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.670930] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 878.670930] env[61545]: value = "task-4255890" [ 878.670930] env[61545]: _type = "Task" [ 878.670930] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.671532] env[61545]: INFO nova.compute.manager [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Terminating instance [ 878.689627] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255890, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.695845] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.747749] env[61545]: DEBUG nova.compute.manager [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Received event network-vif-plugged-14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 878.747749] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Acquiring lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.747749] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.747749] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.747749] env[61545]: DEBUG nova.compute.manager [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] No waiting events found dispatching network-vif-plugged-14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 878.747749] env[61545]: WARNING nova.compute.manager [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Received unexpected event network-vif-plugged-14efafb4-97cd-4720-a2dd-36f9af0a8644 for instance with vm_state building and task_state spawning. 
[ 878.748889] env[61545]: DEBUG nova.compute.manager [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Received event network-changed-14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 878.748889] env[61545]: DEBUG nova.compute.manager [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Refreshing instance network info cache due to event network-changed-14efafb4-97cd-4720-a2dd-36f9af0a8644. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 878.748889] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Acquiring lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.748889] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Acquired lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.748889] env[61545]: DEBUG nova.network.neutron [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Refreshing network info cache for port 14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.870199] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255888, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.923607] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.953545] env[61545]: DEBUG nova.scheduler.client.report [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.998750] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 879.015886] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255885, 'name': ReconfigVM_Task, 'duration_secs': 0.832273} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.016240] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfigured VM instance instance-00000033 to attach disk [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/e3742aa7-0b26-41f5-b8c0-9388ef2b7e74.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.017186] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b1912d9-82e2-4685-bf7b-409f16c09927 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.027159] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 879.027159] env[61545]: value = "task-4255891" [ 879.027159] env[61545]: _type = "Task" [ 879.027159] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.036872] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 879.037373] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.037659] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 879.037993] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.038270] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 879.038545] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 879.038876] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 879.039217] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 879.039512] 
env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 879.039793] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 879.040120] env[61545]: DEBUG nova.virt.hardware [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 879.041180] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8520e6a-b85c-4332-8db5-80d9e135e3c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.050543] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255891, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.057130] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d8a096-b256-48eb-a3d0-6628a9dc4e1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.173146] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255889, 'name': CreateVM_Task, 'duration_secs': 0.431426} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.173597] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.182122] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.182122] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.182122] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 879.182122] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153122f1-1ace-4d23-ba69-b4d91df20c06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.185236] env[61545]: DEBUG nova.compute.manager [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.186584] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.187198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9254814-a874-4266-a0d8-504e358ad42a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.200316] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255890, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.203015] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 879.203015] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520b6529-6ad6-b76f-0dea-c415845eadb0" [ 879.203015] env[61545]: _type = "Task" [ 879.203015] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.211321] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.212153] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e54f476d-d098-4c48-94e8-11dc2932c181 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.218566] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520b6529-6ad6-b76f-0dea-c415845eadb0, 'name': SearchDatastore_Task, 'duration_secs': 0.010987} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.219307] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.219543] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.219799] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.220319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.220319] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.220474] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-437fc92c-87ec-4cb6-a615-9b49f951b9a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.224231] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 879.224231] env[61545]: value = "task-4255892" [ 879.224231] env[61545]: _type = "Task" [ 879.224231] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.232779] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.234071] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.234260] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.235034] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2348012-21b9-43a1-818f-7778e96d4c59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.241405] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 879.241405] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52779fd7-a3a9-3625-dd3c-8ac9390add4c" [ 879.241405] env[61545]: _type = "Task" [ 879.241405] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.250896] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52779fd7-a3a9-3625-dd3c-8ac9390add4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.370959] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513252} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.371249] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 62301196-fb8a-45fe-9193-0ad8f7126ab5/62301196-fb8a-45fe-9193-0ad8f7126ab5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.371467] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.371738] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35341797-0411-4543-b931-135e8026100e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.379411] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 879.379411] env[61545]: value = "task-4255893" [ 879.379411] env[61545]: _type = "Task" [ 879.379411] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.390365] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255893, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.421085] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "13db992b-db13-451f-a853-9b7de28b9184" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.421399] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.421683] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "13db992b-db13-451f-a853-9b7de28b9184-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.421959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.422188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.425179] env[61545]: INFO nova.compute.manager [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Terminating instance [ 879.456898] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.488s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.459341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.701s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.461594] env[61545]: INFO nova.compute.claims [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a 
tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 879.488246] env[61545]: INFO nova.scheduler.client.report [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Deleted allocations for instance 6f2a4514-4de9-427d-91be-f445235696bf [ 879.538101] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255891, 'name': Rename_Task, 'duration_secs': 0.22298} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.538101] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.538372] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e055c7d-0271-42c4-8783-126ac3bfba72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.549429] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 879.549429] env[61545]: value = "task-4255894" [ 879.549429] env[61545]: _type = "Task" [ 879.549429] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.568817] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.685714] env[61545]: DEBUG oslo_vmware.api [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255890, 'name': RemoveSnapshot_Task, 'duration_secs': 0.672314} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.686059] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 879.686305] env[61545]: INFO nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Took 16.93 seconds to snapshot the instance on the hypervisor. 
[ 879.735091] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255892, 'name': PowerOffVM_Task, 'duration_secs': 0.244825} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.735372] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.735524] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.736014] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36a7fd4a-32d8-41a0-8e0b-64dcd3b80c1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.748410] env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Successfully updated port: d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.752491] env[61545]: DEBUG nova.network.neutron [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Updated VIF entry in instance network info cache for port 14efafb4-97cd-4720-a2dd-36f9af0a8644. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.754036] env[61545]: DEBUG nova.network.neutron [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Updating instance_info_cache with network_info: [{"id": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "address": "fa:16:3e:dc:09:46", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14efafb4-97", "ovs_interfaceid": "14efafb4-97cd-4720-a2dd-36f9af0a8644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.761322] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52779fd7-a3a9-3625-dd3c-8ac9390add4c, 'name': SearchDatastore_Task, 'duration_secs': 0.019645} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.762497] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8247bfd-4ca6-4374-a486-85626cf8a2d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.771328] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 879.771328] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d8df8f-ae17-3f33-a437-1344289a081f" [ 879.771328] env[61545]: _type = "Task" [ 879.771328] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.782284] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d8df8f-ae17-3f33-a437-1344289a081f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.808188] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.808484] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.808773] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Deleting the datastore file [datastore2] 0554c462-1dc5-4043-94ac-7a3d28ed05e1 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.809299] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40dbe1d1-17e6-4933-90f8-27cebd0eb188 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.816481] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for the task: (returnval){ [ 879.816481] env[61545]: value = "task-4255896" [ 879.816481] env[61545]: _type = "Task" [ 879.816481] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.826419] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.890714] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072749} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.891105] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.891988] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc64ebe-0047-41da-8916-755f8cd15e4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.920379] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 62301196-fb8a-45fe-9193-0ad8f7126ab5/62301196-fb8a-45fe-9193-0ad8f7126ab5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.920855] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86ee7744-43ad-48e6-96b6-a4fb1912356a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.941261] env[61545]: DEBUG nova.compute.manager [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.941559] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.942508] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc2b784-e6be-45e9-8450-5acddf6b4ca1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.951487] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.953113] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d277e401-a495-4898-8880-3e0755c53c5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.955349] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 879.955349] env[61545]: value = "task-4255897" [ 879.955349] env[61545]: _type = "Task" [ 879.955349] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.968596] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255897, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.002636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8a49b5b4-3b79-4d06-a666-bd95fd27cfbf tempest-ServersAaction247Test-759052924 tempest-ServersAaction247Test-759052924-project-member] Lock "6f2a4514-4de9-427d-91be-f445235696bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.340s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.029222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.029463] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.029646] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] 13db992b-db13-451f-a853-9b7de28b9184 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.030401] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47080b2e-aec8-442f-8f9f-4ec0c81f5ab2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.039270] env[61545]: DEBUG oslo_vmware.api [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 880.039270] env[61545]: value = "task-4255899" [ 880.039270] env[61545]: _type = "Task" [ 880.039270] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.044877] env[61545]: DEBUG nova.compute.manager [req-698eec07-d69d-40ed-a308-66a4245ce82d req-4aa5c675-7a8b-424c-851c-7fd244d2c0cc service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Received event network-vif-deleted-53f2ef34-f0c8-46d5-86d1-b21ecc745ad5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 880.045681] env[61545]: INFO nova.compute.manager [req-698eec07-d69d-40ed-a308-66a4245ce82d req-4aa5c675-7a8b-424c-851c-7fd244d2c0cc service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Neutron deleted interface 53f2ef34-f0c8-46d5-86d1-b21ecc745ad5; detaching it from the instance and deleting it from the info cache [ 880.045681] env[61545]: DEBUG nova.network.neutron [req-698eec07-d69d-40ed-a308-66a4245ce82d req-4aa5c675-7a8b-424c-851c-7fd244d2c0cc service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.050766] env[61545]: DEBUG oslo_vmware.api [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.060556] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255894, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.079619] env[61545]: DEBUG nova.network.neutron [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.190287] env[61545]: DEBUG nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance disappeared during snapshot {{(pid=61545) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 880.204214] env[61545]: DEBUG nova.compute.manager [None req-e1a06b72-14eb-4943-be6a-7e7954c809dd tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image not found during clean up a6f04a5f-3f40-4297-bbe6-7cf21745fa53 {{(pid=61545) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 880.253463] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.253463] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.253463] 
env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.263839] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf9e4f1-671e-47e4-8b9a-67f0ef5bc35c req-f247a1be-469f-42b5-8f6b-8a6e488767a6 service nova] Releasing lock "refresh_cache-9b62358e-c834-461c-9954-49f513b0f4ac" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.282876] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d8df8f-ae17-3f33-a437-1344289a081f, 'name': SearchDatastore_Task, 'duration_secs': 0.01386} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.283234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.283569] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9b62358e-c834-461c-9954-49f513b0f4ac/9b62358e-c834-461c-9954-49f513b0f4ac.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.283815] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14a9e093-70a1-466c-82b3-35f38c1619ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.292595] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 880.292595] env[61545]: value = "task-4255900" [ 880.292595] env[61545]: _type = "Task" [ 880.292595] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.304014] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255900, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.332985] env[61545]: DEBUG oslo_vmware.api [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Task: {'id': task-4255896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148225} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.332985] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.332985] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.333211] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.333484] env[61545]: INFO nova.compute.manager [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 880.333772] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.333981] env[61545]: DEBUG nova.compute.manager [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.334107] env[61545]: DEBUG nova.network.neutron [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.466591] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255897, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.556723] env[61545]: DEBUG oslo_vmware.api [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4255899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157589} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.561642] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-354454c7-e570-4535-b708-957fcb54987c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.568942] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.569405] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.569838] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.570231] env[61545]: INFO nova.compute.manager [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Took 0.63 seconds to destroy the instance on the hypervisor. [ 880.570680] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.572012] env[61545]: DEBUG nova.compute.manager [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.572220] env[61545]: DEBUG nova.network.neutron [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.583190] env[61545]: INFO nova.compute.manager [-] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Took 2.02 seconds to deallocate network for instance. [ 880.585296] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255894, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.597345] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5830e2d2-df6d-42b7-9d65-b501b0c89808 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.652027] env[61545]: DEBUG nova.compute.manager [req-698eec07-d69d-40ed-a308-66a4245ce82d req-4aa5c675-7a8b-424c-851c-7fd244d2c0cc service nova] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Detach interface failed, port_id=53f2ef34-f0c8-46d5-86d1-b21ecc745ad5, reason: Instance e8c954ec-de76-4d3e-9a63-6c30523d5b63 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 880.663047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.663874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.812304] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255900, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.913803] env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.969194] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255897, 'name': ReconfigVM_Task, 'duration_secs': 0.956098} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.971955] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 62301196-fb8a-45fe-9193-0ad8f7126ab5/62301196-fb8a-45fe-9193-0ad8f7126ab5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.972860] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bb95a94-e7cb-4b92-99c3-e0d9a65e7af0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.981785] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 880.981785] env[61545]: value = "task-4255901" [ 880.981785] env[61545]: _type = "Task" [ 880.981785] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.995622] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255901, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.074232] env[61545]: DEBUG oslo_vmware.api [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255894, 'name': PowerOnVM_Task, 'duration_secs': 1.203632} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.074537] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.074953] env[61545]: INFO nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Took 10.86 seconds to spawn the instance on the hypervisor. 
[ 881.075218] env[61545]: DEBUG nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.076505] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea4d3e3-f069-46c6-8430-71a807319e8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.118275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.131723] env[61545]: DEBUG nova.compute.manager [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Received event network-vif-plugged-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 881.132014] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.132242] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.132613] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.132613] env[61545]: DEBUG nova.compute.manager [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] No waiting events found dispatching network-vif-plugged-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 881.132811] env[61545]: WARNING nova.compute.manager [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Received unexpected event network-vif-plugged-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f for instance with vm_state building and task_state spawning. 
[ 881.132964] env[61545]: DEBUG nova.compute.manager [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Received event network-changed-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 881.133172] env[61545]: DEBUG nova.compute.manager [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Refreshing instance network info cache due to event network-changed-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 881.133520] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.313071] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255900, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724091} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.316407] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9b62358e-c834-461c-9954-49f513b0f4ac/9b62358e-c834-461c-9954-49f513b0f4ac.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.317211] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.324408] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d2b12f9-246f-4cdd-b15b-72aaa1564e8f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.333537] env[61545]: DEBUG nova.network.neutron [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.341593] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 881.341593] env[61545]: value = "task-4255902" [ 881.341593] env[61545]: _type = "Task" [ 881.341593] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.364231] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255902, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.404916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32f7381-e8b3-45f9-bffe-4df28ae39b71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.414214] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9415e238-ded8-4e61-bbab-e9e0d1fe5300 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.446386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ed3e76-2bb2-4108-b3ab-f4d40648777b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.454411] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a7c1e7-0f12-46f9-9e37-868be68b356e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.468582] env[61545]: DEBUG nova.compute.provider_tree [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.492534] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255901, 'name': Rename_Task, 'duration_secs': 0.354935} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.492534] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.492534] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fba40f3d-4c12-4095-ab97-22b4eac29046 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.499590] env[61545]: DEBUG nova.network.neutron [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.502590] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 881.502590] env[61545]: value = "task-4255903" [ 881.502590] env[61545]: _type = "Task" [ 881.502590] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.512534] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255903, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.609253] env[61545]: INFO nova.compute.manager [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Took 42.92 seconds to build instance. 
[ 881.837290] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.837644] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Instance network_info: |[{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 881.837980] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.838291] env[61545]: DEBUG nova.network.neutron [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Refreshing network info cache for port d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.839750] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:11:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd36c7f3a-2a3b-44a8-bfe6-1af85f921b3f', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.848471] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating folder: Project (45efa52890714522b3058b7144b42a89). 
Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.849858] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa0a4b00-e69e-408d-bbcf-ab88c9cd0dc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.864162] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075297} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.864850] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.865777] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3470093-075b-4491-893d-43f469e3a254 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.870282] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created folder: Project (45efa52890714522b3058b7144b42a89) in parent group-v838542. [ 881.870575] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating folder: Instances. Parent ref: group-v838692. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.871269] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7d2fa5c-bbbb-448f-9efb-057c74499eaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.892864] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 9b62358e-c834-461c-9954-49f513b0f4ac/9b62358e-c834-461c-9954-49f513b0f4ac.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.893177] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2b523e7-b767-491d-aa81-10de561a9463 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.909637] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created folder: Instances in parent group-v838692. 
[ 881.909906] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.910524] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.910790] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dea5c089-f78b-463c-8a2a-8846f45ab868 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.926564] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 881.926564] env[61545]: value = "task-4255906" [ 881.926564] env[61545]: _type = "Task" [ 881.926564] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.932604] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.932604] env[61545]: value = "task-4255907" [ 881.932604] env[61545]: _type = "Task" [ 881.932604] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.939440] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.947027] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255907, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.972550] env[61545]: DEBUG nova.scheduler.client.report [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.007685] env[61545]: INFO nova.compute.manager [-] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Took 1.44 seconds to deallocate network for instance. [ 882.017466] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255903, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.089755] env[61545]: DEBUG nova.network.neutron [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.116026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53281cb3-7137-47e7-8a92-6b56675d5776 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.200s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.344792] env[61545]: DEBUG nova.compute.manager [req-554accbb-bfbc-4870-bb75-129b54537a22 req-c5cae995-0b0d-4d8c-aca1-432db0606e6d service nova] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Received event network-vif-deleted-400a0ea3-0087-4d35-bc44-2849c40231e6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 882.441083] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255906, 'name': ReconfigVM_Task, 'duration_secs': 0.315229} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.444577] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 9b62358e-c834-461c-9954-49f513b0f4ac/9b62358e-c834-461c-9954-49f513b0f4ac.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.445582] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8baff7f3-8f5b-47a9-beaf-e51be77a4030 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.455497] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255907, 'name': CreateVM_Task, 'duration_secs': 0.509692} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.457266] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.458632] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 882.458632] env[61545]: value = "task-4255908" [ 882.458632] env[61545]: _type = "Task" [ 882.458632] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.458909] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.459073] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.459560] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.460009] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb7894c3-9672-4bf3-af1e-857de39f03be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.471244] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 882.471244] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a476b7-2739-126c-5c9c-afce0cc14150" [ 882.471244] env[61545]: _type = "Task" [ 882.471244] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.473677] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255908, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.478523] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.019s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.478756] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 882.481845] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.709s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.487020] env[61545]: INFO nova.compute.claims [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.493315] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a476b7-2739-126c-5c9c-afce0cc14150, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.513896] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255903, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.519311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.594665] env[61545]: INFO nova.compute.manager [-] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Took 2.26 seconds to deallocate network for instance. [ 882.618585] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.850555] env[61545]: DEBUG nova.network.neutron [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updated VIF entry in instance network info cache for port d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.850555] env[61545]: DEBUG nova.network.neutron [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.972504] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255908, 'name': Rename_Task, 'duration_secs': 0.160965} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.972800] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.973074] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff237174-554d-409c-8029-7ea841bb126d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.988169] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a476b7-2739-126c-5c9c-afce0cc14150, 'name': SearchDatastore_Task, 'duration_secs': 0.018902} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.990412] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.990723] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.991017] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.992196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.992458] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.992884] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 882.992884] env[61545]: value = "task-4255909" [ 882.992884] env[61545]: _type = "Task" [ 882.992884] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.994254] env[61545]: DEBUG nova.compute.utils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 882.996589] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf062cbe-33c0-4eb2-9761-7cb9db3f5649 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.005161] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.006169] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.027757] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.028134] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.038960] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3cd62bd-b556-47b7-b5bd-90416a017738 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.042303] env[61545]: DEBUG oslo_vmware.api [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255903, 'name': PowerOnVM_Task, 'duration_secs': 1.027043} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.042630] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255909, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.043440] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.047080] env[61545]: INFO nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Took 9.77 seconds to spawn the instance on the hypervisor. 
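The lockutils entries above that acquire and later release a lock named after the cached image path ("[datastore2] devstack-image-cache_base/bf68eb43-...") serialize the check-then-populate window of the image cache, so concurrent spawns of the same image do not race while the SearchDatastore_Task calls look for the cached VMDK. A rough sketch of that pattern using oslo.concurrency, assuming hypothetical placeholder helpers for the datastore search and image fetch (not Nova's real functions):

from oslo_concurrency import lockutils

def image_exists_in_cache(session, cache_vmdk_path):
    """Hypothetical stand-in for the HostDatastoreBrowser.SearchDatastore_Task
    lookups in the log that check for the cached VMDK."""
    raise NotImplementedError

def fetch_image_to_cache(session, image_id, cache_vmdk_path):
    """Hypothetical stand-in for downloading the Glance image into the
    devstack-image-cache_base directory."""
    raise NotImplementedError

def ensure_cached_image(session, image_id, cache_vmdk_path):
    # Serialize on the cache path so only one request populates it, mirroring
    # the Acquiring/Acquired/Releasing lock lines in the log.
    with lockutils.lock(cache_vmdk_path):
        if not image_exists_in_cache(session, cache_vmdk_path):
            fetch_image_to_cache(session, image_id, cache_vmdk_path)
    return cache_vmdk_path
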
[ 883.047476] env[61545]: DEBUG nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.050434] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed08f26c-0748-4c9d-ab63-5d2908c1c40d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.058081] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 883.058081] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d32f18-4116-0775-3c15-549a4692c044" [ 883.058081] env[61545]: _type = "Task" [ 883.058081] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.076754] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d32f18-4116-0775-3c15-549a4692c044, 'name': SearchDatastore_Task, 'duration_secs': 0.017241} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.077527] env[61545]: DEBUG nova.policy [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cdf64fe2fad47a582da369ec3c378ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd3ea97039a04935931355f1b8c10ed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.079643] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631ae2e1-fe12-4f92-a3b9-b20f97ab81d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.086185] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 883.086185] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289552f-9534-3bfe-ab25-7e88d6ce1d14" [ 883.086185] env[61545]: _type = "Task" [ 883.086185] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.094554] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289552f-9534-3bfe-ab25-7e88d6ce1d14, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.096188] env[61545]: INFO nova.compute.manager [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Rescuing [ 883.096295] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.096438] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.096596] env[61545]: DEBUG nova.network.neutron [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.105231] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.146502] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.266786] env[61545]: DEBUG nova.compute.manager [req-16c521aa-28f5-4962-bcbc-4afcab243c8d req-a859f3d9-477e-4479-a233-0a6f117486a0 service nova] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Received event network-vif-deleted-1db9311b-fde5-4366-81bb-0717451a8c25 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 883.353380] env[61545]: DEBUG oslo_concurrency.lockutils [req-d93f3f0c-17fb-4bd4-a8ab-c4bc8a175362 req-27553362-c664-450d-8d98-c54050d8fdff service nova] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.508133] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 883.528597] env[61545]: DEBUG oslo_vmware.api [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255909, 'name': PowerOnVM_Task, 'duration_secs': 0.525693} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.530661] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.530661] env[61545]: INFO nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Took 7.45 seconds to spawn the instance on the hypervisor. [ 883.530661] env[61545]: DEBUG nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.533513] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207c6a7a-2be1-456f-bda8-a10b18a7d534 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.583042] env[61545]: INFO nova.compute.manager [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Took 42.30 seconds to build instance. [ 883.601373] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5289552f-9534-3bfe-ab25-7e88d6ce1d14, 'name': SearchDatastore_Task, 'duration_secs': 0.010201} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.601677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.601936] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.602650] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97cbeb9a-c6e0-4464-9cb0-2ba815241b31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.616116] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 883.616116] env[61545]: value = "task-4255910" [ 883.616116] env[61545]: _type = "Task" [ 883.616116] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.627980] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.634285] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Successfully created port: 132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.055349] env[61545]: INFO nova.compute.manager [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Took 39.34 seconds to build instance. 
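The recurring "Waiting for the task ... progress is N% ... completed successfully" lines throughout this section come from polling each vSphere task object (CreateVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, and so on) until it reaches a terminal state. A simplified sketch of that loop, where get_task_info is a hypothetical accessor for the task's state and progress rather than oslo.vmware's actual API:

import time

def get_task_info(session, task_ref):
    """Hypothetical accessor returning the task's info as a dict, e.g.
    {'state': 'running', 'progress': 66, 'error': None}."""
    raise NotImplementedError

def wait_for_task(session, task_ref, poll_interval=0.5):
    # Poll until the task reaches a terminal state; each iteration corresponds
    # to one of the "progress is N%" lines in the log above.
    while True:
        info = get_task_info(session, task_ref)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('vSphere task %s failed: %s'
                               % (task_ref, info['error']))
        time.sleep(poll_interval)
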
[ 884.085492] env[61545]: DEBUG oslo_concurrency.lockutils [None req-296778e7-4263-45ba-b8fd-2a4fbf8a0e32 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.153s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.127231] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255910, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.128275] env[61545]: DEBUG nova.network.neutron [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updating instance_info_cache with network_info: [{"id": "8564beef-15a1-4c85-b090-c45df7394356", "address": "fa:16:3e:22:66:bd", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8564beef-15", "ovs_interfaceid": "8564beef-15a1-4c85-b090-c45df7394356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.279647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc754c4-8f27-480d-81bf-d99a3b28bd4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.291772] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5412df07-8885-4804-9cdd-bdf760671c13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.325850] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6de474b-20d3-44b5-8ca4-44f3a4472cac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.334953] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9349277-5489-4286-b601-f7ea60280424 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.349372] env[61545]: DEBUG nova.compute.provider_tree [None 
req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.535148] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 884.558755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12072cfc-db30-4a70-a510-865d2c16bd8c tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.830s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.576029] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 884.576198] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.576316] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 884.576458] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.576689] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 884.576862] env[61545]: DEBUG 
nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 884.577108] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 884.577278] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 884.577455] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 884.577671] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 884.577882] env[61545]: DEBUG nova.virt.hardware [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 884.579026] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8e8d8a-fed7-46bc-a1be-9384313c70fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.589035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c48dfe-cb80-4685-a4db-1e3cad61d760 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.592901] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 884.628146] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514214} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.628396] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.628556] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.628891] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f26fe33d-e0ba-41d0-b554-1716a5b2501f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.631936] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.639948] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 884.639948] env[61545]: value = "task-4255911" [ 884.639948] env[61545]: _type = "Task" [ 884.639948] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.652227] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.853329] env[61545]: DEBUG nova.scheduler.client.report [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.061891] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 885.117835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.150486] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066481} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.150774] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.151573] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a746908a-7d99-4372-933b-3c4fcd5c5e2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.176983] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.178084] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0509ba50-e9ec-42eb-b9c6-a0426da38f9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.199544] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 885.199544] env[61545]: value = "task-4255912" [ 885.199544] env[61545]: _type = "Task" [ 885.199544] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.208734] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255912, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.358920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.877s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.359727] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 885.364128] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.467s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.369242] env[61545]: INFO nova.compute.claims [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.594081] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.710583] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255912, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.728518] env[61545]: DEBUG nova.compute.manager [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Received event network-vif-plugged-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 885.728630] env[61545]: DEBUG oslo_concurrency.lockutils [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] Acquiring lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.728819] env[61545]: DEBUG oslo_concurrency.lockutils [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.729088] env[61545]: DEBUG oslo_concurrency.lockutils [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.729337] env[61545]: DEBUG nova.compute.manager [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] No waiting events found dispatching network-vif-plugged-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.729548] env[61545]: WARNING nova.compute.manager [req-0433fdf5-c86e-4c38-843f-3a3c00aa323e req-7bada311-d73b-477f-8e1d-53574ed79f5b service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Received unexpected event network-vif-plugged-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 for instance with vm_state building and task_state spawning. [ 885.785390] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Successfully updated port: 132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.879472] env[61545]: DEBUG nova.compute.utils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 885.881141] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 885.881141] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 885.949584] env[61545]: DEBUG nova.policy [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a76971360f074d398d059dbcb9ada6ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae60c9c1b7804134b570d0384dc85ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.178890] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.179332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61479bc1-8656-4820-8566-09506381821c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.190720] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 886.190720] env[61545]: value = "task-4255913" [ 886.190720] env[61545]: _type = "Task" [ 886.190720] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.207078] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255913, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.219801] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255912, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.289405] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.289617] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.290698] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.327018] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Successfully created port: a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.384943] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 886.704716] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255913, 'name': PowerOffVM_Task, 'duration_secs': 0.411616} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.705197] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 886.709565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6d497e-ede8-40d2-a908-b4bfd36dde48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.722248] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255912, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.744963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce63b86-b96f-4440-b971-c9e230ac91b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.793147] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.793520] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b0e322a-f943-4124-8425-0ceb4051815f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.805388] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 886.805388] env[61545]: value = "task-4255914" [ 886.805388] env[61545]: _type = "Task" [ 886.805388] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.818022] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 886.818022] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.818022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.818022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.818022] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.818022] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5438d3d8-44bd-4d15-afb1-9c61994bb2d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.828150] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.828404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.829777] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3fb358-453e-43fa-be55-e2920f316d1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.834771] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 886.834771] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525a497d-8d6d-49fd-c6b9-749e551bdccd" [ 886.834771] env[61545]: _type = "Task" [ 886.834771] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.846708] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525a497d-8d6d-49fd-c6b9-749e551bdccd, 'name': SearchDatastore_Task, 'duration_secs': 0.009136} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.847486] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea6af139-c7cc-4126-9856-ac47f90f6d73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.854966] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 886.854966] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524dbba3-44c3-c860-df53-95b7bb3af5ca" [ 886.854966] env[61545]: _type = "Task" [ 886.854966] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.864028] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524dbba3-44c3-c860-df53-95b7bb3af5ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.874907] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.135988] env[61545]: DEBUG nova.network.neutron [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Updating instance_info_cache with network_info: [{"id": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "address": "fa:16:3e:d0:7d:21", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap132b9a92-c7", "ovs_interfaceid": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.170513] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2861f979-ba9d-4c85-8ba9-a3212940c4d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.179525] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6283d8f-7982-4745-a3cf-88cf34fb8292 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.214874] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6cab1a-b090-44e5-bd26-39c99099e314 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.222821] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255912, 'name': ReconfigVM_Task, 'duration_secs': 1.525279} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.225038] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.225695] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a131f764-9a32-4ba6-8440-3960bd778271 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.228167] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20776586-fe0e-45c1-94e4-5ac19c65399f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.242667] env[61545]: DEBUG nova.compute.provider_tree [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.247306] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 887.247306] env[61545]: value = "task-4255915" [ 887.247306] env[61545]: _type = "Task" [ 887.247306] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.255929] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255915, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.367032] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524dbba3-44c3-c860-df53-95b7bb3af5ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010129} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.367032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.367266] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. {{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 887.367549] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3eed9d91-901a-426e-8237-e2ac1c4eb84c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.375699] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 887.375699] env[61545]: value = "task-4255916" [ 887.375699] env[61545]: _type = "Task" [ 887.375699] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.385605] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.394559] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 887.417248] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 887.417521] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.417708] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 887.417971] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.418167] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None 
req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 887.420023] env[61545]: DEBUG nova.virt.hardware [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 887.420375] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7951f929-4df4-4f7d-a2de-ce44dcb07a77 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.430249] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e35fb8-8512-4dee-ba5c-99fdb7c7caf7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.644558] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.644939] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Instance network_info: |[{"id": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "address": "fa:16:3e:d0:7d:21", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap132b9a92-c7", "ovs_interfaceid": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 887.645458] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a 
tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:7d:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '132b9a92-c7de-4fef-9870-cc4a0a7ae9c2', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.656018] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 887.656474] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.656735] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-832cc3da-8f2f-42be-9501-0b6de35a2518 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.680772] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.680772] env[61545]: value = "task-4255917" [ 887.680772] env[61545]: _type = "Task" [ 887.680772] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.690939] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255917, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.748292] env[61545]: DEBUG nova.scheduler.client.report [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.764540] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255915, 'name': Rename_Task, 'duration_secs': 0.183489} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.765155] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.765532] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d16ef16a-4de7-4bc3-8fc4-3c3b7f76277d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.775620] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 887.775620] env[61545]: value = "task-4255918" [ 887.775620] env[61545]: _type = "Task" [ 887.775620] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.787103] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.801217] env[61545]: DEBUG nova.compute.manager [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Received event network-changed-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 887.801440] env[61545]: DEBUG nova.compute.manager [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Refreshing instance network info cache due to event network-changed-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 887.801711] env[61545]: DEBUG oslo_concurrency.lockutils [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] Acquiring lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.801860] env[61545]: DEBUG oslo_concurrency.lockutils [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] Acquired lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.802039] env[61545]: DEBUG nova.network.neutron [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Refreshing network info cache for port 132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.887744] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255916, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.191863] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255917, 'name': CreateVM_Task, 'duration_secs': 0.50341} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.192072] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.192792] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.192962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.193353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 888.193649] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b85306d0-78a4-4e51-ba9a-3c5046f0d46e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.199870] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a 
tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 888.199870] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c52c79-ae7d-b0b3-1913-5a2116f8d7e0" [ 888.199870] env[61545]: _type = "Task" [ 888.199870] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.209672] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c52c79-ae7d-b0b3-1913-5a2116f8d7e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.257585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.893s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.258196] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.262254] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.286s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.262499] env[61545]: DEBUG nova.objects.instance [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'resources' on Instance uuid 60edf62d-3fb8-4d85-9a4e-ef71c565d940 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.286969] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255918, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.391263] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526444} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.391756] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. [ 888.397120] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b19f17-13b2-4baf-8a08-db4141253829 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.399642] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Successfully updated port: a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.432174] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.433181] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32ff093c-5f87-4497-8f12-96e699e2d172 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.456045] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 888.456045] env[61545]: value = "task-4255919" [ 888.456045] env[61545]: _type = "Task" [ 888.456045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.465399] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255919, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.686637] env[61545]: DEBUG nova.network.neutron [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Updated VIF entry in instance network info cache for port 132b9a92-c7de-4fef-9870-cc4a0a7ae9c2. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.687243] env[61545]: DEBUG nova.network.neutron [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Updating instance_info_cache with network_info: [{"id": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "address": "fa:16:3e:d0:7d:21", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap132b9a92-c7", "ovs_interfaceid": "132b9a92-c7de-4fef-9870-cc4a0a7ae9c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.713540] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c52c79-ae7d-b0b3-1913-5a2116f8d7e0, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.714186] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.714450] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.714712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.714854] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.715048] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.715315] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d31b51f3-3760-46e9-b522-f2ba0479555e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.730027] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.730199] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.731077] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe6e2c0-62f0-4336-9fce-9ab5c7cdd882 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.737958] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 888.737958] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ad874-e63e-b82d-daa9-e608f263dd09" [ 888.737958] env[61545]: _type = "Task" [ 888.737958] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.746731] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ad874-e63e-b82d-daa9-e608f263dd09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.765301] env[61545]: DEBUG nova.compute.utils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 888.766755] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 888.766965] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.787436] env[61545]: DEBUG oslo_vmware.api [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4255918, 'name': PowerOnVM_Task, 'duration_secs': 0.623846} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.787705] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.787906] env[61545]: INFO nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 9.79 seconds to spawn the instance on the hypervisor. 
[ 888.788096] env[61545]: DEBUG nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.788886] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a86fda-d88c-42a3-a210-d9d76e42076a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.902012] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.902178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.902329] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.968008] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.077308] env[61545]: DEBUG nova.policy [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4943bc31b1c4f4396688c44c677db0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d2daf4ae4164d9c83882d0e64124316', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.193229] env[61545]: DEBUG oslo_concurrency.lockutils [req-0b28e3d7-92b3-4b0a-8775-c658c6e5d2c8 req-e0d1eb74-34f9-456e-914d-642fa1a71812 service nova] Releasing lock "refresh_cache-63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.251768] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ad874-e63e-b82d-daa9-e608f263dd09, 'name': SearchDatastore_Task, 'duration_secs': 0.043569} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.252735] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a429281d-a282-40b9-9984-1019b2166783 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.266311] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 889.266311] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522af6ac-5305-2d2a-4fdf-47b6cd8f0fef" [ 889.266311] env[61545]: _type = "Task" [ 889.266311] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.271026] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 889.279810] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522af6ac-5305-2d2a-4fdf-47b6cd8f0fef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.310460] env[61545]: INFO nova.compute.manager [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 41.54 seconds to build instance. [ 889.428314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd112f7a-086c-4cb1-940b-1e2428dd1bf2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.436239] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d745a13-8716-42d0-8ac6-eae846f876e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.470852] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8c207d-89ef-499a-9c3b-a3a391480f1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.487899] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e3a912-12a2-49d8-a276-090a324d27cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.494207] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255919, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.503146] env[61545]: DEBUG nova.compute.provider_tree [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.505165] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.717118] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Successfully created port: d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.782175] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522af6ac-5305-2d2a-4fdf-47b6cd8f0fef, 'name': SearchDatastore_Task, 'duration_secs': 0.017543} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.782850] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.782937] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae/63b3a0ac-6077-4b07-bff0-81e5faa6a2ae.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.783194] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6858a01e-5a48-45f4-ab0b-e5fe91d7f7a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.789224] env[61545]: DEBUG nova.network.neutron [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Updating instance_info_cache with network_info: [{"id": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "address": "fa:16:3e:3b:b2:50", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa63c04a1-28", "ovs_interfaceid": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.794022] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 889.794022] env[61545]: value = "task-4255920" [ 889.794022] env[61545]: _type = "Task" [ 889.794022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.802346] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.812206] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a50c94db-5270-40ac-8dc1-7ae287aeaf0f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.315s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.976938] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255919, 'name': ReconfigVM_Task, 'duration_secs': 1.170403} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.977306] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfigured VM instance instance-00000033 to attach disk [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.978302] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67838dba-92f8-45be-9e5e-7d444d5e9df1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.007340] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1eb8449-1767-4c16-b1a2-49ebc7adb346 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.021329] env[61545]: DEBUG nova.scheduler.client.report [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.034462] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 890.034462] env[61545]: value = "task-4255921" [ 890.034462] env[61545]: _type = "Task" [ 890.034462] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.045053] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255921, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.285476] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Start spawning the instance on the hypervisor. 
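Illustrative aside (not part of the log): the inventory reported above for provider 7015027d-c4e1-4938-ac31-6e4672774d7e uses the standard Placement fields, and the capacity the scheduler can actually allocate from each resource class is (total - reserved) * allocation_ratio. A minimal Python sketch over those values:

    # Inventory exactly as reported in the log record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 450,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def allocatable(inv):
        # Effective capacity Placement exposes for one resource class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, allocatable(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 450.0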
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 890.293622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.294046] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Instance network_info: |[{"id": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "address": "fa:16:3e:3b:b2:50", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa63c04a1-28", "ovs_interfaceid": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.294891] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:b2:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a63c04a1-28a0-46f2-a336-ab01754e90b6', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.303232] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.310705] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.312580] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5fd206c-7280-4875-8922-b732d6121c76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.333947] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.334362] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.334537] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.334813] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.335042] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.335115] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.335281] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.335440] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.335611] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.335862] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.335956] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.336535] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 890.339921] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d49e5ce-091b-410c-84da-c7f057bab1a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.346948] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255920, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.355581] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7aa861-5eaf-4420-8835-68247d0b61fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.359817] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.359817] env[61545]: value = "task-4255922" [ 890.359817] env[61545]: _type = "Task" [ 890.359817] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.361341] env[61545]: INFO nova.compute.manager [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Rebuilding instance [ 890.364784] env[61545]: DEBUG nova.compute.manager [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Received event network-vif-plugged-a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 890.365464] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Acquiring lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.365464] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.365464] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.365688] env[61545]: DEBUG nova.compute.manager [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] No waiting events found dispatching network-vif-plugged-a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.365730] env[61545]: WARNING nova.compute.manager [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Received unexpected event network-vif-plugged-a63c04a1-28a0-46f2-a336-ab01754e90b6 for instance with vm_state building and task_state spawning. [ 890.365995] env[61545]: DEBUG nova.compute.manager [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Received event network-changed-a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 890.366199] env[61545]: DEBUG nova.compute.manager [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Refreshing instance network info cache due to event network-changed-a63c04a1-28a0-46f2-a336-ab01754e90b6. 
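Illustrative aside (not part of the log): the externally delivered events above follow a <event-type>-<port UUID> naming pattern (network-vif-plugged-a63c04a1-..., network-changed-a63c04a1-...). A tiny parser, written only for illustration and not taken from Nova, that splits the port id back off the event name:

    def split_event(name):
        # Event prefixes observed in this log.
        for prefix in ('network-vif-plugged', 'network-changed'):
            if name.startswith(prefix + '-'):
                return prefix, name[len(prefix) + 1:]
        return name, None

    print(split_event('network-vif-plugged-a63c04a1-28a0-46f2-a336-ab01754e90b6'))
    # ('network-vif-plugged', 'a63c04a1-28a0-46f2-a336-ab01754e90b6')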
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 890.366411] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Acquiring lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.366550] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Acquired lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.366745] env[61545]: DEBUG nova.network.neutron [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Refreshing network info cache for port a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.389127] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255922, 'name': CreateVM_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.418277] env[61545]: DEBUG nova.compute.manager [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.419227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d32d77-e9c0-45f8-91c7-f148e81c54ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.529682] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.266s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.534454] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.440s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.539020] env[61545]: INFO nova.compute.claims [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.554811] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255921, 'name': ReconfigVM_Task, 'duration_secs': 0.36033} completed successfully. 
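Illustrative aside (not part of the log): the Acquiring lock / acquired ... waited Ns / released ... held Ns triplets above (for example the "compute_resources" lock that was waited on for 26.440s) are emitted by oslo.concurrency. A hedged sketch of the caller-side pattern, with a placeholder function standing in for the guarded work:

    from oslo_concurrency import lockutils

    def claim_resources():
        # Placeholder for whatever the lock protects (e.g. a resource claim).
        pass

    # Context-manager form: the "Acquiring" / "acquired ... waited" /
    # "released ... held" DEBUG lines bracket blocks like this one.
    with lockutils.lock("compute_resources"):
        claim_resources()

    # Decorator form, serializing callers on the same lock name.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass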
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.555352] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.555447] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17b1762c-5e81-4f3d-b13e-7e2e69302c01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.565852] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 890.565852] env[61545]: value = "task-4255923" [ 890.565852] env[61545]: _type = "Task" [ 890.565852] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.574036] env[61545]: INFO nova.scheduler.client.report [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance 60edf62d-3fb8-4d85-9a4e-ef71c565d940 [ 890.585923] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255923, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.805641] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552296} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.806136] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae/63b3a0ac-6077-4b07-bff0-81e5faa6a2ae.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.806337] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.806659] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e348241f-c82c-4eaa-a95c-0fc70065ec66 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.814732] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 890.814732] env[61545]: value = "task-4255924" [ 890.814732] env[61545]: _type = "Task" [ 890.814732] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.825400] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255924, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.870886] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.879857] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255922, 'name': CreateVM_Task, 'duration_secs': 0.497524} completed successfully. 
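Illustrative aside (not part of the log): the recurring Task: {'id': task-..., ...} progress is N% / completed successfully pairs are produced by oslo.vmware polling vCenter tasks. A rough caller-side sketch of that pattern; the host, credentials and datastore paths below are placeholders, and the calls are an approximation rather than Nova's code:

    from oslo_vmware import api

    # Placeholder connection details; Nova reads these from its [vmware] config.
    session = api.VMwareAPISession('vcenter.example.invalid', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start an asynchronous vCenter operation; it returns a task reference.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/example.vmdk',
        destName='[datastore2] example-instance/example-instance.vmdk')

    # wait_for_task() polls the task (emitting "progress is N%" DEBUG lines
    # like those above) and returns once it completes, raising on error.
    session.wait_for_task(task)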
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.879857] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 890.879857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.879857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.879857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 890.879857] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72bea5b6-0311-4d32-8426-36ad38a0f3b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.886707] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 890.886707] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9f0ec-3254-a727-555c-f0815be106ea" [ 890.886707] env[61545]: _type = "Task" [ 890.886707] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.896665] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9f0ec-3254-a727-555c-f0815be106ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.083955] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255923, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.095867] env[61545]: DEBUG nova.compute.manager [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Received event network-changed-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 891.096313] env[61545]: DEBUG nova.compute.manager [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Refreshing instance network info cache due to event network-changed-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 891.096991] env[61545]: DEBUG oslo_concurrency.lockutils [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.097298] env[61545]: DEBUG oslo_concurrency.lockutils [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.097815] env[61545]: DEBUG nova.network.neutron [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Refreshing network info cache for port d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.100126] env[61545]: DEBUG oslo_concurrency.lockutils [None req-72cc57aa-6d97-4f6b-bcb5-67b10d28b526 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "60edf62d-3fb8-4d85-9a4e-ef71c565d940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.872s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.285139] env[61545]: DEBUG nova.network.neutron [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Updated VIF entry in instance network info cache for port a63c04a1-28a0-46f2-a336-ab01754e90b6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.285544] env[61545]: DEBUG nova.network.neutron [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Updating instance_info_cache with network_info: [{"id": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "address": "fa:16:3e:3b:b2:50", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa63c04a1-28", "ovs_interfaceid": "a63c04a1-28a0-46f2-a336-ab01754e90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.327962] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088332} completed successfully. 
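Illustrative aside (not part of the log): the instance_info_cache entries above are lists of VIF dicts with nested network, subnet and IP structures. A small helper, written only for illustration, that walks that shape and collects the addresses (the sample below is cut down from the cache entry logged above):

    # Reduced from the cached entry for port a63c04a1-28a0-46f2-a336-ab01754e90b6;
    # only the fields the helper reads are kept.
    network_info = [{
        "id": "a63c04a1-28a0-46f2-a336-ab01754e90b6",
        "address": "fa:16:3e:3b:b2:50",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed",
                     "floating_ips": []}],
        }]},
    }]

    def addresses(vifs):
        # Yield (port id, fixed IP, [floating IPs]) for every VIF/subnet/IP.
        for vif in vifs:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    yield (vif["id"], ip["address"],
                           [f["address"] for f in ip.get("floating_ips", [])])

    print(list(addresses(network_info)))
    # [('a63c04a1-28a0-46f2-a336-ab01754e90b6', '192.168.128.14', [])]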
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.327962] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.328798] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3426c6ff-b991-4ac4-b2e1-d9cd4b9ede90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.353070] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae/63b3a0ac-6077-4b07-bff0-81e5faa6a2ae.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.353407] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fa0fe3c-31f4-4fe8-96bb-0fcc44983cea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.378442] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 891.378442] env[61545]: value = "task-4255925" [ 891.378442] env[61545]: _type = "Task" [ 891.378442] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.390133] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255925, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.401053] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9f0ec-3254-a727-555c-f0815be106ea, 'name': SearchDatastore_Task, 'duration_secs': 0.012026} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.401355] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.401612] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.401814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.402017] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.402143] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.402412] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-514a21b9-d1c8-41e1-b75e-98bc4ab97210 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.419856] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.420059] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.420839] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-464e4af8-872d-41aa-be45-4718bea27395 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.426402] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 891.426402] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52182505-2709-788a-ff6e-af9dcc0e0efb" [ 891.426402] env[61545]: _type = "Task" [ 891.426402] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.434730] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.435385] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52182505-2709-788a-ff6e-af9dcc0e0efb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.435617] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46539be2-6939-442c-926e-4f1832dc07ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.445117] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 891.445117] env[61545]: value = "task-4255926" [ 891.445117] env[61545]: _type = "Task" [ 891.445117] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.454206] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.582747] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255923, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.787966] env[61545]: DEBUG oslo_concurrency.lockutils [req-0a5e2630-e5f2-4e4c-ad94-0651af02f9be req-cb705d60-dac9-4596-a647-b0350f6aa31e service nova] Releasing lock "refresh_cache-f7a16153-2ef7-4be4-90a2-5ad6616203f8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.887834] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.937401] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52182505-2709-788a-ff6e-af9dcc0e0efb, 'name': SearchDatastore_Task, 'duration_secs': 0.040359} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.938201] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a68e6202-b6f6-498c-b9b8-59d4f1139d9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.943930] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 891.943930] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52445ef6-96cc-2801-f66f-4da4639da84a" [ 891.943930] env[61545]: _type = "Task" [ 891.943930] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.959325] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255926, 'name': PowerOffVM_Task, 'duration_secs': 0.228491} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.962530] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.963231] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.963497] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52445ef6-96cc-2801-f66f-4da4639da84a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.963708] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-649fe744-88c4-4dd7-8fc2-97b107760d53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.974529] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 891.974529] env[61545]: value = "task-4255927" [ 891.974529] env[61545]: _type = "Task" [ 891.974529] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.985704] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 891.985953] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 891.986198] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838592', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'name': 'volume-7578d319-27d0-4d5d-99aa-9bce7818396d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '26e339f1-182b-4f00-b7c2-a2a32e942d04', 'attached_at': '', 'detached_at': '', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'serial': '7578d319-27d0-4d5d-99aa-9bce7818396d'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 891.986992] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b319729a-f0de-474e-a03f-523175eec776 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.007711] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e76308d-2877-4ef0-8060-0af0c2ee8fec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.015132] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57b1c03-6384-4f8f-88f6-3ad62c9ec860 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.044022] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f9ce8c-17b7-4ca9-82bf-00a789ebca0d {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.057533] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] The volume has not been displaced from its original location: [datastore2] volume-7578d319-27d0-4d5d-99aa-9bce7818396d/volume-7578d319-27d0-4d5d-99aa-9bce7818396d.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 892.063060] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 892.068148] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-595176d3-3907-4fe0-b450-cbf62ae63589 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.080818] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Successfully updated port: d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.091856] env[61545]: DEBUG oslo_vmware.api [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255923, 'name': PowerOnVM_Task, 'duration_secs': 1.052579} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.093262] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.095431] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 892.095431] env[61545]: value = "task-4255928" [ 892.095431] env[61545]: _type = "Task" [ 892.095431] env[61545]: } to complete. 
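Illustrative aside (not part of the log): the sequence above shows a second power-off attempt answered with "VM already powered off", i.e. the operation is treated as idempotent. A simplified sketch of that check using oslo.vmware helpers (an approximation, not Nova's vm_util code); session and vm_ref are assumed to be an established VMwareAPISession and a VM managed-object reference:

    from oslo_vmware import vim_util

    def power_off(session, vm_ref):
        # Read the current power state before acting.
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, vm_ref, 'runtime.powerState')
        if state == 'poweredOff':
            return  # already off; treat the request as a no-op
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)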
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.098957] env[61545]: DEBUG nova.compute.manager [None req-3871dd20-943d-4d4b-a0cd-ab6951d71be1 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 892.099828] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e664d648-c9a4-4ab9-bf7a-48c39bf09f82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.116451] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.217223] env[61545]: DEBUG nova.network.neutron [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updated VIF entry in instance network info cache for port d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.217731] env[61545]: DEBUG nova.network.neutron [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.314567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073711ae-b6ab-418a-81bb-8b0cc886844a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.322590] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc7192d-0975-40c2-bde9-9a6dd3b9d82d {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.356921] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb64f98d-fc43-4ba9-9ac5-279f4cd3ae16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.366532] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c63f49e-7245-4737-b7d4-2793cc6a76a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.383255] env[61545]: DEBUG nova.compute.provider_tree [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.395270] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.460030] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52445ef6-96cc-2801-f66f-4da4639da84a, 'name': SearchDatastore_Task, 'duration_secs': 0.032345} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.460311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.460572] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f7a16153-2ef7-4be4-90a2-5ad6616203f8/f7a16153-2ef7-4be4-90a2-5ad6616203f8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.460840] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e26b098a-7109-4d4b-9838-f9660a9e28ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.468323] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 892.468323] env[61545]: value = "task-4255929" [ 892.468323] env[61545]: _type = "Task" [ 892.468323] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.476372] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.584175] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.584351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.584522] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.613671] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255928, 'name': ReconfigVM_Task, 'duration_secs': 0.319903} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.614074] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 892.623259] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53c62009-335c-4434-b07c-0e28e4c26d29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.640097] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 892.640097] env[61545]: value = "task-4255930" [ 892.640097] env[61545]: _type = "Task" [ 892.640097] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.651520] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255930, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.723492] env[61545]: DEBUG oslo_concurrency.lockutils [req-2ad46ca7-edb3-45a4-943f-be0cb1262902 req-c627fdbd-f400-4fce-8883-16dea45261ae service nova] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.733465] env[61545]: DEBUG nova.compute.manager [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Received event network-vif-plugged-d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 892.733878] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Acquiring lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.734199] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.734349] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.734541] env[61545]: DEBUG nova.compute.manager [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] No waiting events found dispatching network-vif-plugged-d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.734762] env[61545]: WARNING nova.compute.manager [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Received unexpected event network-vif-plugged-d04fca3a-1fcc-4b30-b0e7-83c25fe97200 for instance with vm_state building and task_state spawning. [ 892.734948] env[61545]: DEBUG nova.compute.manager [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Received event network-changed-d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 892.735209] env[61545]: DEBUG nova.compute.manager [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Refreshing instance network info cache due to event network-changed-d04fca3a-1fcc-4b30-b0e7-83c25fe97200. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 892.735484] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Acquiring lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.889811] env[61545]: DEBUG nova.scheduler.client.report [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.897800] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255925, 'name': ReconfigVM_Task, 'duration_secs': 1.259008} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.899345] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae/63b3a0ac-6077-4b07-bff0-81e5faa6a2ae.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.899345] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c857ecc7-80a2-4160-81c9-17b72d4e6c2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.911538] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 892.911538] env[61545]: value = "task-4255931" [ 892.911538] env[61545]: _type = "Task" [ 892.911538] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.926411] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255931, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.964058] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.964058] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.980070] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255929, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.118772] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.157311] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255930, 'name': ReconfigVM_Task, 'duration_secs': 0.132306} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.157671] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838592', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'name': 'volume-7578d319-27d0-4d5d-99aa-9bce7818396d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '26e339f1-182b-4f00-b7c2-a2a32e942d04', 'attached_at': '', 'detached_at': '', 'volume_id': '7578d319-27d0-4d5d-99aa-9bce7818396d', 'serial': '7578d319-27d0-4d5d-99aa-9bce7818396d'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 893.157975] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.160033] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fe2617-c8ea-49f9-a0fb-8147b7319021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.168116] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.168417] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c4a8d07-c9c8-4e63-83ea-9b0f8f6a162c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.240710] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.241870] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.241870] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Deleting the datastore file [datastore2] 26e339f1-182b-4f00-b7c2-a2a32e942d04 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.241870] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22e846fd-9b04-4ff5-b535-5794ccd6b20f {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.251539] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for the task: (returnval){ [ 893.251539] env[61545]: value = "task-4255933" [ 893.251539] env[61545]: _type = "Task" [ 893.251539] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.261920] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.346166] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Updating instance_info_cache with network_info: [{"id": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "address": "fa:16:3e:95:5e:d9", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd04fca3a-1f", "ovs_interfaceid": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.399309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.865s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.400824] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.403207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.206s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.407042] env[61545]: DEBUG nova.objects.instance [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lazy-loading 'resources' on Instance uuid 609ba431-b42b-4b0d-9c16-06e19bee114c {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.431657] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255931, 'name': Rename_Task, 'duration_secs': 0.432588} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.432225] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.432495] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2efe5316-7a71-45ea-b95d-a2a3cd13c27e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.441090] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 893.441090] env[61545]: value = "task-4255934" [ 893.441090] env[61545]: _type = "Task" [ 893.441090] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.451717] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.480500] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.746729} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.480870] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f7a16153-2ef7-4be4-90a2-5ad6616203f8/f7a16153-2ef7-4be4-90a2-5ad6616203f8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.481109] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.481843] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a493687-1c94-499c-aff2-0cc5c7402f4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.489396] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 893.489396] env[61545]: value = "task-4255935" [ 893.489396] env[61545]: _type = "Task" [ 893.489396] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.500472] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255935, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.762105] env[61545]: DEBUG oslo_vmware.api [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Task: {'id': task-4255933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.117669} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.762596] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.762894] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.763196] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.826127] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 893.826127] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b055720-c039-4287-a734-b00149bba79e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.834170] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1189a5ef-63e5-43cd-b2ba-91869a7f297e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.852740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.853371] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Instance network_info: |[{"id": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "address": "fa:16:3e:95:5e:d9", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd04fca3a-1f", "ovs_interfaceid": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 893.855189] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Acquired lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.855189] env[61545]: DEBUG nova.network.neutron [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Refreshing network info cache for port d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.855189] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:5e:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd04fca3a-1fcc-4b30-b0e7-83c25fe97200', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.863622] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.885014] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 893.886653] env[61545]: ERROR nova.compute.manager [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Failed to detach volume 7578d319-27d0-4d5d-99aa-9bce7818396d from /dev/sda: nova.exception.InstanceNotFound: Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 could not be found. 
[ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Traceback (most recent call last): [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self.driver.rebuild(**kwargs) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise NotImplementedError() [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] NotImplementedError [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] During handling of the above exception, another exception occurred: [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Traceback (most recent call last): [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self.driver.detach_volume(context, old_connection_info, [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] return self._volumeops.detach_volume(connection_info, instance) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._detach_volume_vmdk(connection_info, instance) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] stable_ref.fetch_moref(session) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise exception.InstanceNotFound(instance_id=self._uuid) [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] nova.exception.InstanceNotFound: 
Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 could not be found. [ 893.886653] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 893.889328] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16406a75-98e6-4741-ac0b-575ea3aafde0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.908408] env[61545]: DEBUG nova.compute.utils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.911858] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 893.911973] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.915911] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.915911] env[61545]: value = "task-4255936" [ 893.915911] env[61545]: _type = "Task" [ 893.915911] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.922418] env[61545]: INFO nova.compute.manager [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Unrescuing [ 893.923715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.923715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquired lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.923715] env[61545]: DEBUG nova.network.neutron [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.930681] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255936, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.953485] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255934, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.999803] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080186} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.000127] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.001252] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d76b4ef-0098-4140-a57a-4531a424b294 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.011211] env[61545]: DEBUG nova.policy [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4943bc31b1c4f4396688c44c677db0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d2daf4ae4164d9c83882d0e64124316', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.029853] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] f7a16153-2ef7-4be4-90a2-5ad6616203f8/f7a16153-2ef7-4be4-90a2-5ad6616203f8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.032795] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-915bba66-cf66-4a39-8a4c-3f5f35cd151d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.058028] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 894.058028] env[61545]: value = "task-4255937" [ 894.058028] env[61545]: _type = "Task" [ 894.058028] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.060563] env[61545]: DEBUG nova.compute.utils [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Build of instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 aborted: Failed to rebuild volume backed instance. {{(pid=61545) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 894.065250] env[61545]: ERROR nova.compute.manager [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 aborted: Failed to rebuild volume backed instance. [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Traceback (most recent call last): [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self.driver.rebuild(**kwargs) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise NotImplementedError() [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] NotImplementedError [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] During handling of the above exception, another exception occurred: [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Traceback (most recent call last): [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._detach_root_volume(context, instance, root_bdm) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] with excutils.save_and_reraise_exception(): [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self.force_reraise() [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise self.value [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self.driver.detach_volume(context, old_connection_info, [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] return self._volumeops.detach_volume(connection_info, instance) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._detach_volume_vmdk(connection_info, instance) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] stable_ref.fetch_moref(session) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise exception.InstanceNotFound(instance_id=self._uuid) [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] nova.exception.InstanceNotFound: Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 could not be found. 
[ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 894.065250] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] During handling of the above exception, another exception occurred: [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Traceback (most recent call last): [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 11502, in _error_out_instance_on_exception [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] yield [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._do_rebuild_instance_with_claim( [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._do_rebuild_instance( [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._rebuild_default_impl(**kwargs) [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] self._rebuild_volume_backed_instance( [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] raise exception.BuildAbortException( [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] nova.exception.BuildAbortException: Build of instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 aborted: Failed to rebuild volume backed instance. [ 894.066566] env[61545]: ERROR nova.compute.manager [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] [ 894.071116] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255937, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.411937] env[61545]: DEBUG nova.network.neutron [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Updated VIF entry in instance network info cache for port d04fca3a-1fcc-4b30-b0e7-83c25fe97200. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 894.411937] env[61545]: DEBUG nova.network.neutron [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Updating instance_info_cache with network_info: [{"id": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "address": "fa:16:3e:95:5e:d9", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd04fca3a-1f", "ovs_interfaceid": "d04fca3a-1fcc-4b30-b0e7-83c25fe97200", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.420019] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.432124] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255936, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.452678] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255934, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.580271] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255937, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.739392] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4aa7b49-5682-4283-9811-69acd76576fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.750933] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9093d06b-7022-48a2-9110-e215ada3b201 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.795608] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf7e74d-ee5f-4829-a528-993433711be8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.805156] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58c5d4e-7652-4691-b358-9aa34ec21214 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.821083] env[61545]: DEBUG nova.compute.provider_tree [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.902843] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Successfully created port: 053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.914026] env[61545]: DEBUG oslo_concurrency.lockutils [req-5a041da4-fb66-4b7f-acb6-e9aac2743148 req-0257c3eb-046b-46c9-bc10-0b40136b74f0 service nova] Releasing lock "refresh_cache-a84d7a3d-2f7e-459d-94ca-7caa32b7a472" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.932949] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255936, 'name': CreateVM_Task, 'duration_secs': 0.980928} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.933270] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.933793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.933958] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.934284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.934539] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34b2ca9a-e3f8-43af-9c46-711efcb251ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.939643] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 894.939643] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5168b-5ea5-96da-e359-add876cd81fa" [ 894.939643] env[61545]: _type = "Task" [ 894.939643] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.952456] env[61545]: DEBUG oslo_vmware.api [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255934, 'name': PowerOnVM_Task, 'duration_secs': 1.258312} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.955464] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.955678] env[61545]: INFO nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Took 10.42 seconds to spawn the instance on the hypervisor. 
[ 894.955865] env[61545]: DEBUG nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.956200] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5168b-5ea5-96da-e359-add876cd81fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.956988] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da26071d-0d33-4b63-9c49-f6aec15b985f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.960252] env[61545]: DEBUG nova.network.neutron [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updating instance_info_cache with network_info: [{"id": "8564beef-15a1-4c85-b090-c45df7394356", "address": "fa:16:3e:22:66:bd", "network": {"id": "eeb7a7cc-f16b-446a-9000-50f2773e1dc6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1490351281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3a1542baa59a4be387f3fe1526116d37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8564beef-15", "ovs_interfaceid": "8564beef-15a1-4c85-b090-c45df7394356", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.069140] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255937, 'name': ReconfigVM_Task, 'duration_secs': 0.852593} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.069444] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Reconfigured VM instance instance-00000038 to attach disk [datastore2] f7a16153-2ef7-4be4-90a2-5ad6616203f8/f7a16153-2ef7-4be4-90a2-5ad6616203f8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.070155] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40a3619f-8bec-4d48-bc84-80f461ae96cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.076587] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 895.076587] env[61545]: value = "task-4255938" [ 895.076587] env[61545]: _type = "Task" [ 895.076587] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.086829] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255938, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.325013] env[61545]: DEBUG nova.scheduler.client.report [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.430250] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.456073] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e5168b-5ea5-96da-e359-add876cd81fa, 'name': SearchDatastore_Task, 'duration_secs': 0.031138} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.456773] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.456773] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.457464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.457668] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.457886] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.460193] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15bc25e1-efa5-463c-a518-24fde4663810 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.465417] env[61545]: DEBUG oslo_concurrency.lockutils [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Releasing lock "refresh_cache-e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.465417] env[61545]: DEBUG nova.objects.instance [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lazy-loading 'flavor' on Instance uuid e3742aa7-0b26-41f5-b8c0-9388ef2b7e74 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.477675] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.477675] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.477675] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.477675] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.477675] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None 
req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.479056] env[61545]: DEBUG nova.virt.hardware [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.479056] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.479531] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.480768] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3127d5dd-5e76-4642-ad8d-c94fee6f25f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.486271] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0473050-107a-444e-a62f-890b80e451dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.489720] env[61545]: INFO nova.compute.manager [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Took 37.79 seconds to build instance. [ 895.498153] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7346931-482a-4f2b-8903-ba0e786b543f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.501806] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 895.501806] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bd9ad3-ff46-86a8-9067-5a23a2771b3c" [ 895.501806] env[61545]: _type = "Task" [ 895.501806] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.521871] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bd9ad3-ff46-86a8-9067-5a23a2771b3c, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.524111] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c7a050c-1d34-43e9-9a68-7eec75d309af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.529454] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 895.529454] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cbd9d-3a13-31c4-5884-6926f1783105" [ 895.529454] env[61545]: _type = "Task" [ 895.529454] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.538889] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cbd9d-3a13-31c4-5884-6926f1783105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.587734] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255938, 'name': Rename_Task, 'duration_secs': 0.164224} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.592378] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.592938] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-529b2d82-ae6f-4d09-9e65-4110bc45ba09 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.600564] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 895.600564] env[61545]: value = "task-4255939" [ 895.600564] env[61545]: _type = "Task" [ 895.600564] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.611137] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255939, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.832076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.428s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.836040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.060s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.836040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.836040] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 895.836040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.038s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.837638] env[61545]: INFO nova.compute.claims [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 895.840972] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824a8b4d-6722-40c7-8ba3-77f85f0ef458 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.849537] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a761ca-51a4-4f55-8db7-21ff6827cb6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.857689] env[61545]: INFO nova.scheduler.client.report [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Deleted allocations for instance 609ba431-b42b-4b0d-9c16-06e19bee114c [ 895.869823] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37d9569-ce8e-48e4-9aee-91d81448bb7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.877536] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-826c332a-d940-4fb2-ad38-11f739c1bfb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.912105] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177928MB free_disk=244GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 895.912361] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.972297] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b795921-9f1a-4319-ac2b-cd6b7c91e338 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.995231] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc9cf29-faea-4343-b1b7-c3a1b16c400a tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.846s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.995578] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.996859] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-045acced-a494-48f6-8713-f4f50c99d773 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.004469] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 896.004469] env[61545]: value = "task-4255940" [ 896.004469] env[61545]: _type = "Task" [ 896.004469] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.014232] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.042116] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cbd9d-3a13-31c4-5884-6926f1783105, 'name': SearchDatastore_Task, 'duration_secs': 0.013038} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.042116] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.042116] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a84d7a3d-2f7e-459d-94ca-7caa32b7a472/a84d7a3d-2f7e-459d-94ca-7caa32b7a472.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.042116] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ff34e8c-8869-435a-ae0b-34434859ff7c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.050316] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 896.050316] env[61545]: value = "task-4255941" [ 896.050316] env[61545]: _type = "Task" [ 896.050316] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.063235] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.095343] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.111846] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255939, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.377173] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b8a2e8db-814e-4990-bd0e-6833b9e0fd8d tempest-ServerRescueTestJSONUnderV235-989113730 tempest-ServerRescueTestJSONUnderV235-989113730-project-member] Lock "609ba431-b42b-4b0d-9c16-06e19bee114c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.555s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.500135] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 896.519530] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255940, 'name': PowerOffVM_Task, 'duration_secs': 0.22246} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.519530] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 896.526256] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 896.527355] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8938d6a-ff60-4657-828c-05efb5884328 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.550045] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 896.550045] env[61545]: value = "task-4255942" [ 896.550045] env[61545]: _type = "Task" [ 896.550045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.570072] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255941, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.576620] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255942, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.618250] env[61545]: DEBUG oslo_vmware.api [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4255939, 'name': PowerOnVM_Task, 'duration_secs': 0.730871} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.618250] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.618250] env[61545]: INFO nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Took 9.22 seconds to spawn the instance on the hypervisor. [ 896.618250] env[61545]: DEBUG nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.618732] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ffae91-37e5-418e-9222-a3e5ab9c2602 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.027795] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.068264] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.075777] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.606214} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.077421] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a84d7a3d-2f7e-459d-94ca-7caa32b7a472/a84d7a3d-2f7e-459d-94ca-7caa32b7a472.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.077733] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.078110] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e5abbb1-970d-4812-9481-27fdaceb3ad6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.086522] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 897.086522] env[61545]: value = "task-4255943" [ 897.086522] env[61545]: _type = "Task" [ 897.086522] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.099045] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.148706] env[61545]: INFO nova.compute.manager [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Took 39.44 seconds to build instance. 
[ 897.185184] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Successfully updated port: 053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.260552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.261353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.261659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.261862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.262046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.264593] env[61545]: INFO nova.compute.manager [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Terminating instance [ 897.381325] env[61545]: DEBUG nova.compute.manager [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Received event network-vif-plugged-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 897.381572] env[61545]: DEBUG oslo_concurrency.lockutils [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] Acquiring lock 
"bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.381943] env[61545]: DEBUG oslo_concurrency.lockutils [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.382245] env[61545]: DEBUG oslo_concurrency.lockutils [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.382499] env[61545]: DEBUG nova.compute.manager [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] No waiting events found dispatching network-vif-plugged-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.382769] env[61545]: WARNING nova.compute.manager [req-089d281e-e881-440e-a4bf-30d2c0d42ef1 req-f2d07c7e-e277-4c3a-a375-af5e3c751b5a service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Received unexpected event network-vif-plugged-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 for instance with vm_state building and task_state spawning. [ 897.570019] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255942, 'name': ReconfigVM_Task, 'duration_secs': 0.846688} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.570326] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 897.570509] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.570788] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bda45e92-ae7d-43bf-9883-d6e16d1a6951 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.581350] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 897.581350] env[61545]: value = "task-4255944" [ 897.581350] env[61545]: _type = "Task" [ 897.581350] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.585907] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af679e8-0533-4a3a-91dc-b72e5de82d2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.595860] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.600233] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae1c94f-fe99-45a5-92dd-310bd9d6b42a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.608333] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066373} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.609331] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.610240] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e64d0f-9ef5-4c18-873e-54ee6be72ab0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.646265] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66152746-7e38-421c-98e1-621e7a82ee59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.657290] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5959a7f0-a39f-416f-92ee-1f612b497dc7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.535s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.667629] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] a84d7a3d-2f7e-459d-94ca-7caa32b7a472/a84d7a3d-2f7e-459d-94ca-7caa32b7a472.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.669685] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-905be332-1f24-4762-86ab-0b84deb52412 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.690163] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dde55f6-9f9e-4da8-8c68-8c7ec20013f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.697652] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.697878] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.698153] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.703044] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 897.703044] env[61545]: value = "task-4255945" [ 897.703044] env[61545]: _type = "Task" [ 897.703044] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.719321] env[61545]: DEBUG nova.compute.provider_tree [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.727813] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.774952] env[61545]: DEBUG nova.compute.manager [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 897.777427] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-393f4ed7-b069-47e6-9c40-3c1b2959243c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.786007] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7dd764-e0a4-448c-9460-2d46110468a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.829161] env[61545]: WARNING nova.virt.vmwareapi.driver [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 could not be found. [ 897.829161] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.829554] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41023b64-72ff-4af2-b182-14417e511227 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.839808] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9152b4f5-bc07-4b9a-933d-ecd020d9eaad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.881552] env[61545]: WARNING nova.virt.vmwareapi.vmops [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26e339f1-182b-4f00-b7c2-a2a32e942d04 could not be found. [ 897.881949] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 897.882124] env[61545]: INFO nova.compute.manager [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Took 0.11 seconds to destroy the instance on the hypervisor. [ 897.882380] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.882683] env[61545]: DEBUG nova.compute.manager [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 897.882798] env[61545]: DEBUG nova.network.neutron [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.096393] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255944, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.187513] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 898.219369] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255945, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.224716] env[61545]: DEBUG nova.scheduler.client.report [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.287688] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.592835] env[61545]: DEBUG oslo_vmware.api [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255944, 'name': PowerOnVM_Task, 'duration_secs': 0.677018} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.593448] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.594237] env[61545]: DEBUG nova.compute.manager [None req-939d7224-2b31-4be5-a75a-d4de3cc6bbf0 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.595158] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fea258e-5e82-44ed-8c58-0b6a1390754f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.669464] env[61545]: DEBUG nova.network.neutron [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Updating instance_info_cache with network_info: [{"id": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "address": "fa:16:3e:91:dc:17", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap053e08f5-fc", "ovs_interfaceid": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.715276] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255945, 'name': ReconfigVM_Task, 'duration_secs': 0.586385} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.716353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.716688] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Reconfigured VM instance instance-00000039 to attach disk [datastore2] a84d7a3d-2f7e-459d-94ca-7caa32b7a472/a84d7a3d-2f7e-459d-94ca-7caa32b7a472.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.717378] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1efbd971-192e-477c-b9c6-61226b854a16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.725666] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 898.725666] env[61545]: value = "task-4255946" [ 898.725666] env[61545]: _type = "Task" [ 898.725666] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.730732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.895s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.731392] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 898.737610] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.698s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.737866] env[61545]: DEBUG nova.objects.instance [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lazy-loading 'resources' on Instance uuid d517f427-8580-481b-b50f-150da6c571b9 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.739215] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255946, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.937226] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "d980f421-03b5-4b0e-b547-a33031356d55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.937539] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.173174] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.173174] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Instance network_info: |[{"id": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "address": "fa:16:3e:91:dc:17", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap053e08f5-fc", "ovs_interfaceid": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.173174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:dc:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.182020] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.182252] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.182542] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c9c9ea7-a75f-4fa4-82c8-f5cd52231e4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.214244] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.214244] env[61545]: value = "task-4255947" [ 899.214244] env[61545]: _type = "Task" [ 899.214244] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.225824] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255947, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.235566] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255946, 'name': Rename_Task, 'duration_secs': 0.289949} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.235868] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.236177] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-349d9b69-e849-4d71-b253-12a48fc33b04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.242051] env[61545]: DEBUG nova.compute.utils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.243719] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.244009] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.251933] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 899.251933] env[61545]: value = "task-4255948" [ 899.251933] env[61545]: _type = "Task" [ 899.251933] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.262879] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255948, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.379526] env[61545]: DEBUG nova.policy [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb00c18cd27541359ae0adf45f5c4171', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa2360863a5f4eff8a88eca0c88fa76d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 899.426346] env[61545]: DEBUG nova.network.neutron [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.722202] env[61545]: DEBUG nova.compute.manager [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Received event network-changed-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 899.722408] env[61545]: DEBUG nova.compute.manager [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Refreshing instance network info cache due to event network-changed-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 899.722650] env[61545]: DEBUG oslo_concurrency.lockutils [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] Acquiring lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.722805] env[61545]: DEBUG oslo_concurrency.lockutils [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] Acquired lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.722992] env[61545]: DEBUG nova.network.neutron [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Refreshing network info cache for port 053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.733641] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255947, 'name': CreateVM_Task, 'duration_secs': 0.425495} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.733641] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.734120] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.734120] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.734251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.737141] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b41bf34d-53e8-4f78-ba0b-85e4cce845b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.744131] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 899.744131] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eef537-f2b1-a7da-3a42-44f25c05be19" [ 899.744131] env[61545]: _type = "Task" [ 899.744131] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.751457] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 899.759496] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eef537-f2b1-a7da-3a42-44f25c05be19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.770584] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255948, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.934559] env[61545]: INFO nova.compute.manager [-] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Took 2.05 seconds to deallocate network for instance. [ 900.044557] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dc343e-e70d-435a-844b-ca004329e3bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.055795] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5f763f-f955-4790-9d0a-3d1244248272 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.090755] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace2d3da-6ffb-45b3-a832-7019d58749dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.094037] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Successfully created port: af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.101531] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5fb84a-c296-4654-9f1e-a2156cb7ce07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.117814] env[61545]: DEBUG nova.compute.provider_tree [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.191103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.191103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.191103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.191103] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.191103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.194506] env[61545]: INFO nova.compute.manager [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Terminating instance [ 900.255180] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eef537-f2b1-a7da-3a42-44f25c05be19, 'name': SearchDatastore_Task, 'duration_secs': 0.015805} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.255531] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.255883] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.256031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.256191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.256376] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.256668] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13a009f6-3a34-4be2-bcb4-0b868e668690 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.270557] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.270813] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.275182] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9deb201b-a49b-4c7d-a13e-e4d7f12e1ccb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.278560] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255948, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.285956] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 900.285956] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528806e4-ab6c-ed10-df5e-93f0b33b3258" [ 900.285956] env[61545]: _type = "Task" [ 900.285956] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.296965] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528806e4-ab6c-ed10-df5e-93f0b33b3258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.497363] env[61545]: INFO nova.compute.manager [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Took 0.56 seconds to detach 1 volumes for instance. 
[ 900.501209] env[61545]: DEBUG nova.compute.manager [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Deleting volume: 7578d319-27d0-4d5d-99aa-9bce7818396d {{(pid=61545) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 900.624478] env[61545]: DEBUG nova.scheduler.client.report [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.700082] env[61545]: DEBUG nova.compute.manager [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 900.700416] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.701386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329e0e9a-b7e7-4728-9fb5-b62ba1664ab1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.710060] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.710178] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb78c1c2-bca6-47d6-a059-ac4651706163 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.717621] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 900.717621] env[61545]: value = "task-4255950" [ 900.717621] env[61545]: _type = "Task" [ 900.717621] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.718758] env[61545]: DEBUG nova.network.neutron [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Updated VIF entry in instance network info cache for port 053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.719163] env[61545]: DEBUG nova.network.neutron [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Updating instance_info_cache with network_info: [{"id": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "address": "fa:16:3e:91:dc:17", "network": {"id": "a957f0c7-a727-47d7-abf6-263b871440b1", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1096491342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d2daf4ae4164d9c83882d0e64124316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap053e08f5-fc", "ovs_interfaceid": "053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.734769] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.771260] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 900.777040] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255948, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.800917] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528806e4-ab6c-ed10-df5e-93f0b33b3258, 'name': SearchDatastore_Task, 'duration_secs': 0.030893} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.804279] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3376b2e0-d9a2-49a8-9e38-a417bab06a52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.809524] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 900.809769] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 900.809927] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 900.810154] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.810272] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 900.810407] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 900.810614] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 900.810862] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 
tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 900.810950] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 900.811896] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 900.811896] env[61545]: DEBUG nova.virt.hardware [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 900.813159] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5430a0b8-d21b-49ed-909d-c713927fb8a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.821508] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 900.821508] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1b1e4-18f2-28e9-002b-ae84c11b6f49" [ 900.821508] env[61545]: _type = "Task" [ 900.821508] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.828296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083ffa39-9819-4847-b714-ecf021480696 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.839176] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1b1e4-18f2-28e9-002b-ae84c11b6f49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.071593] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.132414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.394s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.138761] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.847s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.138761] env[61545]: DEBUG nova.objects.instance [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lazy-loading 'resources' on Instance uuid 5b2fb040-a964-479f-ae3f-4f428248d64b {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.232136] env[61545]: DEBUG oslo_concurrency.lockutils [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] Releasing lock "refresh_cache-bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.232403] env[61545]: DEBUG nova.compute.manager [req-9dfc431c-9729-478e-88cc-167777496e3c req-dd00a0b4-8e3f-48f9-ae72-72ec4263900f service nova] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Received event network-vif-deleted-1d429567-3342-4107-b5f0-2596a7002020 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 901.232751] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255950, 'name': PowerOffVM_Task, 'duration_secs': 0.276514} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.232917] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.236111] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.236111] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86126601-c615-497c-a130-4d209ac4ee90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.273945] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255948, 'name': PowerOnVM_Task, 'duration_secs': 1.795084} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.274291] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.275431] env[61545]: INFO nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Took 10.99 seconds to spawn the instance on the hypervisor. 
[ 901.275431] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 901.277657] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6397e6-cb27-49ba-962d-b353fd38c823 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.309012] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.309012] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.309012] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleting the datastore file [datastore2] e3742aa7-0b26-41f5-b8c0-9388ef2b7e74 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.309375] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebba0e8d-5a89-40ca-b862-b239920e5e2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.317658] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 901.317658] env[61545]: value = "task-4255952" [ 901.317658] env[61545]: _type = "Task" [ 901.317658] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.327451] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.338727] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1b1e4-18f2-28e9-002b-ae84c11b6f49, 'name': SearchDatastore_Task, 'duration_secs': 0.020305} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.341303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.341303] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bea2e59c-02fd-4d6d-8f10-b0e265fa87a2/bea2e59c-02fd-4d6d-8f10-b0e265fa87a2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.341303] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6287d980-776b-4720-b9ac-2ba15403bc69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.348163] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 901.348163] env[61545]: value = "task-4255953" [ 901.348163] env[61545]: _type = "Task" [ 901.348163] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.362934] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.382559] env[61545]: INFO nova.scheduler.client.report [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted allocations for instance d517f427-8580-481b-b50f-150da6c571b9 [ 901.814725] env[61545]: INFO nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Took 40.94 seconds to build instance. [ 901.829172] env[61545]: DEBUG oslo_vmware.api [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4255952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283585} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.833995] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.834183] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.834363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.834539] env[61545]: INFO nova.compute.manager [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Took 1.13 seconds to destroy the instance on the hypervisor. [ 901.834863] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.835317] env[61545]: DEBUG nova.compute.manager [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.835418] env[61545]: DEBUG nova.network.neutron [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.865683] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255953, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.894461] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b7814f69-2d26-4887-aa98-b1ef7b415726 tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "d517f427-8580-481b-b50f-150da6c571b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.198s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.319708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.102s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.363840] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255953, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.366607] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88070a7d-913d-4288-864d-39e3029f06bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.377133] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7e988b-9048-4125-aad9-5b300c721699 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.419768] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d10f651-c3ba-49d8-9f72-726579afc8f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.428359] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30f00e0-2150-4bde-9933-ee15713d359e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.448902] env[61545]: DEBUG nova.compute.provider_tree [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.618869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.619220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 
tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.623023] env[61545]: DEBUG nova.compute.manager [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.623023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80516718-b215-41bf-ba33-2a7c3594aecb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.630288] env[61545]: DEBUG nova.compute.manager [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 902.630948] env[61545]: DEBUG nova.objects.instance [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lazy-loading 'flavor' on Instance uuid 62301196-fb8a-45fe-9193-0ad8f7126ab5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.675018] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Successfully updated port: af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.822884] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.863129] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255953, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.327216} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.863446] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bea2e59c-02fd-4d6d-8f10-b0e265fa87a2/bea2e59c-02fd-4d6d-8f10-b0e265fa87a2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.863676] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.863961] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba1517b2-04cf-4aeb-b470-93a3b5a4de0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.871256] env[61545]: DEBUG nova.compute.manager [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Received event network-vif-plugged-af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 902.871256] env[61545]: DEBUG oslo_concurrency.lockutils [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] Acquiring lock "56680678-c844-4dd2-8541-d50de83b22d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.871256] env[61545]: DEBUG oslo_concurrency.lockutils [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] Lock "56680678-c844-4dd2-8541-d50de83b22d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.871256] env[61545]: DEBUG oslo_concurrency.lockutils [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] Lock "56680678-c844-4dd2-8541-d50de83b22d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.871256] env[61545]: DEBUG nova.compute.manager [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] No waiting events found dispatching network-vif-plugged-af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 902.871838] env[61545]: WARNING nova.compute.manager [req-226cda00-7b28-4560-a0d6-bb135c18b86d req-059b9732-8265-45f8-a21e-f5754aac74e3 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Received unexpected event network-vif-plugged-af2c9b85-3238-4b4f-b74f-b72d7b575e73 for instance with vm_state building and 
task_state spawning. [ 902.874257] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 902.874257] env[61545]: value = "task-4255954" [ 902.874257] env[61545]: _type = "Task" [ 902.874257] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.884701] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255954, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.952479] env[61545]: DEBUG nova.scheduler.client.report [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.097488] env[61545]: DEBUG nova.network.neutron [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.178214] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.178214] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.178214] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.348709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.386030] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 
tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075512} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.386203] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.387280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4a75a8-3e14-415e-ae02-59650a5557c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.415096] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] bea2e59c-02fd-4d6d-8f10-b0e265fa87a2/bea2e59c-02fd-4d6d-8f10-b0e265fa87a2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.415513] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c876b59-07fb-486d-99cb-cba80c5e6602 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.440536] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 903.440536] env[61545]: value = "task-4255955" [ 903.440536] env[61545]: _type = "Task" [ 903.440536] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.452361] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255955, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.460259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.323s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.460985] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.066s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.461200] env[61545]: DEBUG nova.objects.instance [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 903.481724] env[61545]: INFO nova.scheduler.client.report [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Deleted allocations for instance 5b2fb040-a964-479f-ae3f-4f428248d64b [ 903.600932] env[61545]: INFO nova.compute.manager [-] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Took 1.77 seconds to deallocate network for instance. [ 903.641389] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.641609] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff4b17dc-a4f0-4f39-9591-2c56a9a477fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.650973] env[61545]: DEBUG oslo_vmware.api [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 903.650973] env[61545]: value = "task-4255956" [ 903.650973] env[61545]: _type = "Task" [ 903.650973] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.661765] env[61545]: DEBUG oslo_vmware.api [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.728534] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.958466] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255955, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.965220] env[61545]: DEBUG nova.network.neutron [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updating instance_info_cache with network_info: [{"id": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "address": "fa:16:3e:78:96:58", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2c9b85-32", "ovs_interfaceid": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.991333] env[61545]: DEBUG oslo_concurrency.lockutils [None req-33c1e9ba-89c7-41e5-aa42-f73b8937f1ef tempest-ListServersNegativeTestJSON-2137231986 tempest-ListServersNegativeTestJSON-2137231986-project-member] Lock "5b2fb040-a964-479f-ae3f-4f428248d64b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.117s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.109794] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.163400] env[61545]: DEBUG oslo_vmware.api [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255956, 'name': PowerOffVM_Task} progress is 
100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.455732] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255955, 'name': ReconfigVM_Task, 'duration_secs': 0.698195} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.456025] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Reconfigured VM instance instance-0000003a to attach disk [datastore2] bea2e59c-02fd-4d6d-8f10-b0e265fa87a2/bea2e59c-02fd-4d6d-8f10-b0e265fa87a2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.456678] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e44183f-fcd0-4c42-9a8c-39a68a009327 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.464210] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 904.464210] env[61545]: value = "task-4255957" [ 904.464210] env[61545]: _type = "Task" [ 904.464210] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.478366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b14db4a6-9348-4233-b422-e4939c0d970d tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.478366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.478366] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance network_info: |[{"id": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "address": "fa:16:3e:78:96:58", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2c9b85-32", "ovs_interfaceid": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 904.478366] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255957, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.478810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.930s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.479167] env[61545]: DEBUG nova.objects.instance [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lazy-loading 'resources' on Instance uuid 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.480662] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:96:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af2c9b85-3238-4b4f-b74f-b72d7b575e73', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.489031] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.490485] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.494016] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04bfc769-f48a-44f7-8a83-6f28a8e7b20c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.517025] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.517025] env[61545]: value = "task-4255958" [ 904.517025] env[61545]: _type = "Task" [ 904.517025] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.527743] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255958, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.667341] env[61545]: DEBUG oslo_vmware.api [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255956, 'name': PowerOffVM_Task, 'duration_secs': 0.593151} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.667648] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.667885] env[61545]: DEBUG nova.compute.manager [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.668745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52072222-520e-4bd6-93b4-07374c2140e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.956610] env[61545]: DEBUG nova.compute.manager [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Received event network-vif-deleted-8564beef-15a1-4c85-b090-c45df7394356 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 904.956978] env[61545]: DEBUG nova.compute.manager [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Received event network-changed-af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 904.957194] env[61545]: DEBUG nova.compute.manager [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Refreshing instance network info cache due to event network-changed-af2c9b85-3238-4b4f-b74f-b72d7b575e73. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 904.957412] env[61545]: DEBUG oslo_concurrency.lockutils [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] Acquiring lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.957541] env[61545]: DEBUG oslo_concurrency.lockutils [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] Acquired lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.957694] env[61545]: DEBUG nova.network.neutron [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Refreshing network info cache for port af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.976544] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255957, 'name': Rename_Task, 'duration_secs': 0.222121} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.976845] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.977480] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8b85124-a231-4ef5-955a-a18753793169 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.985219] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 904.985219] env[61545]: value = "task-4255959" [ 904.985219] env[61545]: _type = "Task" [ 904.985219] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.001831] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.029944] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255958, 'name': CreateVM_Task, 'duration_secs': 0.37742} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.030341] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.032027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.032135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.033132] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 905.033768] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ed5142-dadd-47e0-9e66-d04b24234d82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.040920] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 905.040920] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b8cd2-f27f-b617-cb20-113e6321beff" [ 905.040920] env[61545]: _type = "Task" [ 905.040920] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.056334] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b8cd2-f27f-b617-cb20-113e6321beff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.185019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3e6706d-36dd-446b-8230-fc0580459d1d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.565s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.502061] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255959, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.553260] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b8cd2-f27f-b617-cb20-113e6321beff, 'name': SearchDatastore_Task, 'duration_secs': 0.011621} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.553623] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.553946] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.554178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.554329] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.554543] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.555058] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e14b9fc5-42a2-4d66-a9be-0c160cb9029d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.568446] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.568661] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.569487] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33ddab36-ffac-482d-8ea9-826956928014 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.578401] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 905.578401] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9304e-582d-d221-143a-0a316716c04c" [ 905.578401] env[61545]: _type = "Task" [ 905.578401] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.592170] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9304e-582d-d221-143a-0a316716c04c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.752784] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd9d18c-8159-40cb-94bc-279c7553c967 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.768724] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0535ae-7b53-4e7f-99e5-2cf23f72a838 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.809333] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80764a01-eae6-4b63-b40e-3bf9a992ee3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.817958] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fc7654-0109-4c14-9f8b-af86baa6177b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.832652] env[61545]: DEBUG nova.compute.provider_tree [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.913711] env[61545]: DEBUG nova.network.neutron [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updated VIF entry in instance network info cache for port af2c9b85-3238-4b4f-b74f-b72d7b575e73. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.914098] env[61545]: DEBUG nova.network.neutron [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updating instance_info_cache with network_info: [{"id": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "address": "fa:16:3e:78:96:58", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2c9b85-32", "ovs_interfaceid": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.997538] env[61545]: DEBUG oslo_vmware.api [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255959, 'name': PowerOnVM_Task, 'duration_secs': 0.655147} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.997930] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.998088] env[61545]: INFO nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Took 10.57 seconds to spawn the instance on the hypervisor. 
[ 905.998218] env[61545]: DEBUG nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.999076] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc77050-cadf-4d5f-9022-e3eccfb0f273 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.091022] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9304e-582d-d221-143a-0a316716c04c, 'name': SearchDatastore_Task, 'duration_secs': 0.017321} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.092365] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3f25015-7417-4c92-b194-1c8d45c1a21d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.099033] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 906.099033] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cd92b-1aa1-576e-2ff1-3a263fbbeeca" [ 906.099033] env[61545]: _type = "Task" [ 906.099033] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.108352] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cd92b-1aa1-576e-2ff1-3a263fbbeeca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.338020] env[61545]: DEBUG nova.scheduler.client.report [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.391926] env[61545]: DEBUG nova.objects.instance [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lazy-loading 'flavor' on Instance uuid 62301196-fb8a-45fe-9193-0ad8f7126ab5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.418106] env[61545]: DEBUG oslo_concurrency.lockutils [req-47ac4c58-ded8-4112-95a1-ec30dd57b2fc req-1617d449-0468-4252-bd51-b6b41980a846 service nova] Releasing lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.516450] env[61545]: INFO nova.compute.manager [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Took 42.44 seconds to build instance. [ 906.610612] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523cd92b-1aa1-576e-2ff1-3a263fbbeeca, 'name': SearchDatastore_Task, 'duration_secs': 0.012051} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.611387] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.611387] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.611689] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-297a3e6b-8ffa-45a6-9ee4-4ddfa7b72419 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.619471] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 906.619471] env[61545]: value = "task-4255960" [ 906.619471] env[61545]: _type = "Task" [ 906.619471] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.628570] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255960, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.842983] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.365s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.845538] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.856s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.847629] env[61545]: INFO nova.compute.claims [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.884053] env[61545]: INFO nova.scheduler.client.report [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleted allocations for instance 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2 [ 906.897579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.897764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquired lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.897945] env[61545]: DEBUG nova.network.neutron [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.898151] env[61545]: DEBUG nova.objects.instance [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lazy-loading 'info_cache' on Instance uuid 62301196-fb8a-45fe-9193-0ad8f7126ab5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.019352] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f70ff8fa-637e-4c99-87b4-40dc837a11c0 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.756s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
907.131027] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255960, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.312446] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.312736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.312961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.313173] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.313350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.315678] env[61545]: INFO nova.compute.manager [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Terminating instance [ 907.392104] env[61545]: DEBUG oslo_concurrency.lockutils [None req-250f27aa-d23d-4451-8868-85110dfc1e1a tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "7d2bad05-c461-43b9-9dd0-bdefbd33e3a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.525s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.406294] env[61545]: DEBUG nova.objects.base [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 
tempest-ListServerFiltersTestJSON-816069453-project-member] Object Instance<62301196-fb8a-45fe-9193-0ad8f7126ab5> lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 907.407693] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.407934] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.408143] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.408330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.408530] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.410858] env[61545]: INFO nova.compute.manager [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Terminating instance [ 907.522966] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.631539] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522927} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.631702] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.631929] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.632198] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd46f90d-3cad-42ac-b708-7a85b3380dfc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.639398] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 907.639398] env[61545]: value = "task-4255961" [ 907.639398] env[61545]: _type = "Task" [ 907.639398] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.652176] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255961, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.820139] env[61545]: DEBUG nova.compute.manager [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.820439] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.821795] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe51fb3b-427f-4e29-9b35-d47272b58a70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.831429] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.831429] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c38b3fd3-1466-41a9-b6a9-8a273c7bafc9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.838585] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 907.838585] env[61545]: value = "task-4255962" [ 907.838585] env[61545]: _type = "Task" [ 907.838585] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.849479] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.917390] env[61545]: DEBUG nova.compute.manager [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.917530] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.919016] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98226adb-ec8c-410c-9898-2f46f6d66b6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.924211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.924527] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.924771] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.925076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.925216] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.929522] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.929808] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6138d37-2733-40ee-80b9-1cd803b6d214 {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.931940] env[61545]: INFO nova.compute.manager [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Terminating instance [ 907.942723] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 907.942723] env[61545]: value = "task-4255963" [ 907.942723] env[61545]: _type = "Task" [ 907.942723] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.961233] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.046520] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.153357] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248086} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.153859] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.154682] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6756c9c3-8737-43d8-a782-bca46a013f5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.182826] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.185727] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43510e0d-efd0-4903-8401-f7f89c39e756 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.208523] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 908.208523] env[61545]: value = "task-4255964" [ 908.208523] env[61545]: _type = "Task" [ 908.208523] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.218880] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255964, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.262652] env[61545]: DEBUG nova.network.neutron [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Updating instance_info_cache with network_info: [{"id": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "address": "fa:16:3e:07:8c:49", "network": {"id": "52aa1a4f-f5e8-43dc-86e9-7e76d0ac2269", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1533925293-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd3ea97039a04935931355f1b8c10ed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cec3f8-53", "ovs_interfaceid": "52cec3f8-5316-4f38-86e3-82087b8e5fac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.352300] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255962, 'name': PowerOffVM_Task, 'duration_secs': 0.239329} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.352575] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.352750] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.353020] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a741888-751f-4406-b1e6-c3dbf229983c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.415396] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.416047] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.416047] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleting the datastore file [datastore2] a84d7a3d-2f7e-459d-94ca-7caa32b7a472 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.418801] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1909716-120d-450b-859d-5dea0ab12f3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.425382] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 908.425382] env[61545]: value = "task-4255966" [ 908.425382] env[61545]: _type = "Task" [ 908.425382] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.434042] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.442016] env[61545]: DEBUG nova.compute.manager [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 908.442016] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.442016] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca854958-3bdd-429d-8a6e-25b2414b5220 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.453806] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.453806] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8f5fded-480c-4670-b67d-30f1148d36c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.457542] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255963, 'name': PowerOffVM_Task, 'duration_secs': 0.189106} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.458113] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.458284] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.458547] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f47abc91-5071-4c49-a712-bc06bcdaaabb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.465570] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 908.465570] env[61545]: value = "task-4255967" [ 908.465570] env[61545]: _type = "Task" [ 908.465570] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.473607] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.491199] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b706a1d0-8add-46b2-9642-7e3da3f8da95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.497717] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17906a5f-ec2d-4dd0-b102-b450a84edc70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.542014] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099e8bd0-9ee9-4b5b-a665-b712a82c099b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.548179] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.548179] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.548380] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleting the datastore file [datastore2] bea2e59c-02fd-4d6d-8f10-b0e265fa87a2 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.549031] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abbf40ed-4797-4616-99e2-e242df014424 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.559314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27082bc9-7b96-4d01-808e-d328e1a17021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.566398] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for the task: (returnval){ [ 908.566398] env[61545]: value = "task-4255969" [ 908.566398] env[61545]: _type = "Task" [ 908.566398] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.576432] env[61545]: DEBUG nova.compute.provider_tree [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.583707] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.718928] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255964, 'name': ReconfigVM_Task, 'duration_secs': 0.455518} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.719243] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.719992] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8515e8c6-10e1-46c6-839f-90351072b758 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.727176] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 908.727176] env[61545]: value = "task-4255970" [ 908.727176] env[61545]: _type = "Task" [ 908.727176] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.736472] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255970, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.765984] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Releasing lock "refresh_cache-62301196-fb8a-45fe-9193-0ad8f7126ab5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.936188] env[61545]: DEBUG oslo_vmware.api [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251505} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.936456] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.936590] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.936774] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.936972] env[61545]: INFO nova.compute.manager [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Took 1.12 seconds to destroy the instance on the hypervisor. [ 908.937238] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.937429] env[61545]: DEBUG nova.compute.manager [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.937526] env[61545]: DEBUG nova.network.neutron [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.977019] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255967, 'name': PowerOffVM_Task, 'duration_secs': 0.222179} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.977349] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.977519] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.977773] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c83f68f2-fcf3-4e38-a89c-e3b696e3f751 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.042302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.042695] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.042889] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleting the datastore file [datastore2] 5f4d6338-d1af-4e58-9f76-5e95d51e76f7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.043208] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc29a9b4-8e4d-4401-a53d-a775b0647352 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.050085] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for the task: (returnval){ [ 909.050085] env[61545]: value = "task-4255972" [ 909.050085] env[61545]: _type = "Task" [ 909.050085] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.061185] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255972, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.074492] env[61545]: DEBUG oslo_vmware.api [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Task: {'id': task-4255969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232709} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.074908] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.075252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.075576] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.075886] env[61545]: INFO nova.compute.manager [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 909.076294] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 909.076598] env[61545]: DEBUG nova.compute.manager [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 909.076764] env[61545]: DEBUG nova.network.neutron [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 909.080213] env[61545]: DEBUG nova.scheduler.client.report [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.169764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.169764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.169764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.169764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.169764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.172444] env[61545]: INFO nova.compute.manager [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Terminating instance [ 909.238895] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255970, 'name': Rename_Task, 'duration_secs': 0.154638} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.239215] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.239504] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86132a10-9c0c-4146-b943-bd2b852b68e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.246915] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 909.246915] env[61545]: value = "task-4255973" [ 909.246915] env[61545]: _type = "Task" [ 909.246915] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.256595] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.570136] env[61545]: DEBUG oslo_vmware.api [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Task: {'id': task-4255972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136155} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.570136] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.570136] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.570136] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.570136] env[61545]: INFO nova.compute.manager [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 909.570136] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 909.570136] env[61545]: DEBUG nova.compute.manager [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 909.570136] env[61545]: DEBUG nova.network.neutron [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 909.590402] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.590402] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 909.591787] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.558s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.592975] env[61545]: DEBUG nova.objects.instance [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lazy-loading 'resources' on Instance uuid ecf98c79-da3d-44be-9c76-c3fccc688235 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.675142] env[61545]: DEBUG nova.compute.manager [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.675441] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.676368] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab8f1dc-b913-44de-8d9b-2d69ecfd84c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.685224] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.685469] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94153f25-37e4-4ed7-9384-166128dade3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.694390] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 909.694390] env[61545]: value = "task-4255974" [ 909.694390] env[61545]: _type = "Task" [ 909.694390] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.706325] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255974, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.759615] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.772126] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.772459] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2bc3f3d-d02a-4329-ac3f-dd7d981c48ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.782214] env[61545]: DEBUG oslo_vmware.api [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 909.782214] env[61545]: value = "task-4255975" [ 909.782214] env[61545]: _type = "Task" [ 909.782214] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.805414] env[61545]: DEBUG oslo_vmware.api [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255975, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.936241] env[61545]: DEBUG nova.compute.manager [req-62ebab89-a291-4ba6-8fd7-24275b9b05ea req-5bb30f38-2d2f-4034-a06d-d08c75eecb2d service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Received event network-vif-deleted-d04fca3a-1fcc-4b30-b0e7-83c25fe97200 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 909.936241] env[61545]: INFO nova.compute.manager [req-62ebab89-a291-4ba6-8fd7-24275b9b05ea req-5bb30f38-2d2f-4034-a06d-d08c75eecb2d service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Neutron deleted interface d04fca3a-1fcc-4b30-b0e7-83c25fe97200; detaching it from the instance and deleting it from the info cache [ 909.936241] env[61545]: DEBUG nova.network.neutron [req-62ebab89-a291-4ba6-8fd7-24275b9b05ea req-5bb30f38-2d2f-4034-a06d-d08c75eecb2d service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.096389] env[61545]: DEBUG nova.compute.utils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 910.100318] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 910.100493] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.103043] env[61545]: DEBUG nova.network.neutron [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.149246] env[61545]: DEBUG nova.compute.manager [req-282e06ce-77b0-4f84-8d16-1171c7060e82 req-e847a59f-d8a9-43f7-abd8-868b021732f9 service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Received event network-vif-deleted-f6cead7f-5ede-4097-9f73-f9849bdc96bc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 910.149493] env[61545]: INFO nova.compute.manager [req-282e06ce-77b0-4f84-8d16-1171c7060e82 req-e847a59f-d8a9-43f7-abd8-868b021732f9 service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Neutron deleted interface f6cead7f-5ede-4097-9f73-f9849bdc96bc; detaching it from the instance and deleting it from the info cache [ 910.150170] env[61545]: DEBUG nova.network.neutron [req-282e06ce-77b0-4f84-8d16-1171c7060e82 req-e847a59f-d8a9-43f7-abd8-868b021732f9 service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.152306] env[61545]: DEBUG nova.policy [None 
req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb62f61940d74d3e9db31ff7f3e2456b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39f1a55fa71c4ec28278ebd71a4bf4d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 910.209092] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255974, 'name': PowerOffVM_Task, 'duration_secs': 0.214889} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.209828] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.209828] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.211206] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d781e24-a7c5-4af0-ae40-1f84b346e6ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.261193] env[61545]: DEBUG oslo_vmware.api [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4255973, 'name': PowerOnVM_Task, 'duration_secs': 0.797342} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.261193] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.261296] env[61545]: INFO nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Took 9.49 seconds to spawn the instance on the hypervisor. 
[ 910.261531] env[61545]: DEBUG nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.263969] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0639a527-a7c1-4567-b3fd-07a1d54d0d42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.266875] env[61545]: DEBUG nova.network.neutron [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.301527] env[61545]: DEBUG oslo_vmware.api [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255975, 'name': PowerOnVM_Task, 'duration_secs': 0.46844} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.306372] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.306713] env[61545]: DEBUG nova.compute.manager [None req-93b94d05-3f35-4bfb-b8f0-6b04ca74693d tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.307126] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.308049] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.308049] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Deleting the datastore file [datastore2] 9cf6dd9e-40e9-4df6-9342-2850e0f93d85 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.309499] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76de6805-c0b6-44c3-ad57-e81803ba399f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.312758] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c5b7510-b6e2-452d-9b9a-4f740108b1ba 
{{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.324457] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for the task: (returnval){ [ 910.324457] env[61545]: value = "task-4255977" [ 910.324457] env[61545]: _type = "Task" [ 910.324457] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.337133] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255977, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.368754] env[61545]: DEBUG nova.network.neutron [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.440594] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9ca68e8-7f7b-43ea-977e-94de4e320ac8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.453141] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23b2282-f203-44eb-987b-86e929a08ce4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.494787] env[61545]: DEBUG nova.compute.manager [req-62ebab89-a291-4ba6-8fd7-24275b9b05ea req-5bb30f38-2d2f-4034-a06d-d08c75eecb2d service nova] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Detach interface failed, port_id=d04fca3a-1fcc-4b30-b0e7-83c25fe97200, reason: Instance a84d7a3d-2f7e-459d-94ca-7caa32b7a472 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 910.496132] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Successfully created port: 1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.608801] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.611412] env[61545]: INFO nova.compute.manager [-] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Took 1.67 seconds to deallocate network for instance. 
[ 910.656499] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e08cee99-392d-4ac4-9a0b-a8c89ad34194 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.666423] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a3c745-e250-48c7-90b1-605cfb2c3c5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.708057] env[61545]: DEBUG nova.compute.manager [req-282e06ce-77b0-4f84-8d16-1171c7060e82 req-e847a59f-d8a9-43f7-abd8-868b021732f9 service nova] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Detach interface failed, port_id=f6cead7f-5ede-4097-9f73-f9849bdc96bc, reason: Instance 5f4d6338-d1af-4e58-9f76-5e95d51e76f7 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 910.771549] env[61545]: INFO nova.compute.manager [-] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Took 1.69 seconds to deallocate network for instance. [ 910.787019] env[61545]: INFO nova.compute.manager [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Took 40.02 seconds to build instance. [ 910.842244] env[61545]: DEBUG oslo_vmware.api [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Task: {'id': task-4255977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292806} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.842605] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.843912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.843912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.843912] env[61545]: INFO nova.compute.manager [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 910.843912] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.843912] env[61545]: DEBUG nova.compute.manager [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.843912] env[61545]: DEBUG nova.network.neutron [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.872751] env[61545]: INFO nova.compute.manager [-] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Took 1.30 seconds to deallocate network for instance. [ 910.884447] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3549fe-da4f-4c0f-9042-927cbfd1c70a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.893458] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798a7ddb-a939-4e3b-8f89-f3a22740f5ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.933314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b05a9ad-cecb-4028-9d69-31539a688345 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.942700] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dfc06f-c085-4e6f-be18-333e3d63440c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.958591] env[61545]: DEBUG nova.compute.provider_tree [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.116920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.281605] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.291948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c408a0-f99b-4fee-9381-25e61e7b9a90 tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] Lock "56680678-c844-4dd2-8541-d50de83b22d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.457s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.380242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.463341] env[61545]: DEBUG nova.scheduler.client.report [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.619087] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 911.656012] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.656795] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.657464] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.659150] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.659420] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.659714] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.662485] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.662656] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.662910] 
env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.663785] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.663785] env[61545]: DEBUG nova.virt.hardware [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.664615] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f34c0d-b11c-4ce8-bae4-f7f2321f1c02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.675643] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab11e183-7abd-46e0-ae37-09073200aed8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.801094] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.969313] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.377s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.972700] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.276s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.002728] env[61545]: INFO nova.scheduler.client.report [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Deleted allocations for instance ecf98c79-da3d-44be-9c76-c3fccc688235 [ 912.091563] env[61545]: DEBUG nova.compute.manager [req-98b76673-7c1b-4fb3-8a08-bedb2117bea5 req-6e85c0e4-5018-43b9-98e4-f2e66824851b service nova] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Received event network-vif-deleted-053e08f5-fcde-42f0-ad1c-6a22a6f1ceb7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 912.091563] env[61545]: DEBUG nova.compute.manager [req-98b76673-7c1b-4fb3-8a08-bedb2117bea5 req-6e85c0e4-5018-43b9-98e4-f2e66824851b service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Received event network-vif-deleted-3f0de1dd-63b8-4054-827e-0daae86eaaa5 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 912.091563] env[61545]: INFO nova.compute.manager [req-98b76673-7c1b-4fb3-8a08-bedb2117bea5 req-6e85c0e4-5018-43b9-98e4-f2e66824851b service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Neutron deleted interface 3f0de1dd-63b8-4054-827e-0daae86eaaa5; detaching it from the instance and deleting it from the info cache [ 912.091563] env[61545]: DEBUG nova.network.neutron [req-98b76673-7c1b-4fb3-8a08-bedb2117bea5 req-6e85c0e4-5018-43b9-98e4-f2e66824851b service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.199306] env[61545]: DEBUG nova.network.neutron [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.211440] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Successfully updated port: 1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.275041] env[61545]: DEBUG nova.compute.manager [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Received event network-changed-af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11768}} [ 912.275851] env[61545]: DEBUG nova.compute.manager [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Refreshing instance network info cache due to event network-changed-af2c9b85-3238-4b4f-b74f-b72d7b575e73. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 912.276562] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Acquiring lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.279034] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Acquired lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.279034] env[61545]: DEBUG nova.network.neutron [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Refreshing network info cache for port af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.323065] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.478725] env[61545]: INFO nova.compute.claims [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.521385] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bf1d0799-5e00-4b9c-9d97-7a0361434199 tempest-AttachInterfacesV270Test-504789276 tempest-AttachInterfacesV270Test-504789276-project-member] Lock "ecf98c79-da3d-44be-9c76-c3fccc688235" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.103s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.595443] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fcc62c8-82fd-4eaf-b358-3d663d7012ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.608179] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b776fee8-c6cd-4847-933f-9f74715b7ab1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.646583] env[61545]: DEBUG nova.compute.manager [req-98b76673-7c1b-4fb3-8a08-bedb2117bea5 req-6e85c0e4-5018-43b9-98e4-f2e66824851b service nova] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Detach interface failed, port_id=3f0de1dd-63b8-4054-827e-0daae86eaaa5, reason: Instance 9cf6dd9e-40e9-4df6-9342-2850e0f93d85 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 912.709350] env[61545]: INFO nova.compute.manager [-] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Took 1.87 seconds to deallocate network for instance. [ 912.717915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.718069] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.718479] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.987044] env[61545]: INFO nova.compute.resource_tracker [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating resource usage from migration cba5117b-fb7a-4947-b2b6-06dabedaf661 [ 913.107729] env[61545]: DEBUG nova.network.neutron [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updated VIF entry in instance network info cache for port af2c9b85-3238-4b4f-b74f-b72d7b575e73. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.107861] env[61545]: DEBUG nova.network.neutron [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updating instance_info_cache with network_info: [{"id": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "address": "fa:16:3e:78:96:58", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2c9b85-32", "ovs_interfaceid": "af2c9b85-3238-4b4f-b74f-b72d7b575e73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.221079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.278808] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.522825] env[61545]: DEBUG nova.network.neutron [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Updating instance_info_cache with network_info: [{"id": "1e891b2e-5640-4992-b111-00dfc1ad763a", "address": "fa:16:3e:4d:63:6e", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e891b2e-56", "ovs_interfaceid": "1e891b2e-5640-4992-b111-00dfc1ad763a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.612243] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Releasing lock "refresh_cache-56680678-c844-4dd2-8541-d50de83b22d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.612433] env[61545]: DEBUG nova.compute.manager [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Received event network-vif-plugged-1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 913.612582] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Acquiring lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.612756] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.612929] env[61545]: DEBUG oslo_concurrency.lockutils [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.613123] env[61545]: DEBUG nova.compute.manager [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] No waiting events found dispatching network-vif-plugged-1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.613377] env[61545]: WARNING nova.compute.manager [req-b646ffeb-a0b1-4842-a6a3-a134e0985384 req-3f67b4e9-66d2-4ed7-b426-ac744c3d2357 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Received unexpected event network-vif-plugged-1e891b2e-5640-4992-b111-00dfc1ad763a for instance with vm_state building and task_state spawning. [ 913.652437] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58fbd00-c6d5-40d0-a792-b597f159dcf9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.661449] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b505e5-0a7d-4e2d-b708-9484253fac7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.695045] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c199840b-b1f5-4015-a9c4-c6de57d8efe4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.703579] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8530e2fe-3988-4116-b892-0ee78c93afd4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.718849] env[61545]: DEBUG nova.compute.provider_tree [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.027276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.028584] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Instance network_info: |[{"id": "1e891b2e-5640-4992-b111-00dfc1ad763a", "address": "fa:16:3e:4d:63:6e", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e891b2e-56", "ovs_interfaceid": "1e891b2e-5640-4992-b111-00dfc1ad763a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 914.031012] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:63:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e891b2e-5640-4992-b111-00dfc1ad763a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.037579] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.038091] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.038597] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c7866d3-957c-45af-bd80-5b13c568e76a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.065015] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.065015] env[61545]: value = "task-4255978" [ 914.065015] env[61545]: _type = "Task" [ 914.065015] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.077332] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255978, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.224176] env[61545]: DEBUG nova.scheduler.client.report [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 914.332140] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.333297] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.333822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.334302] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.334775] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.339238] env[61545]: INFO nova.compute.manager [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Terminating instance [ 914.344318] env[61545]: DEBUG nova.compute.manager [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 
req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Received event network-changed-1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 914.344584] env[61545]: DEBUG nova.compute.manager [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Refreshing instance network info cache due to event network-changed-1e891b2e-5640-4992-b111-00dfc1ad763a. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 914.344810] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] Acquiring lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.344984] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] Acquired lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.345184] env[61545]: DEBUG nova.network.neutron [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Refreshing network info cache for port 1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 914.578894] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4255978, 'name': CreateVM_Task, 'duration_secs': 0.358474} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.579283] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.579902] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.580119] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.580486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.580888] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d52e42-e480-4d00-9c47-376884efcb75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.588046] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 914.588046] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ea576-0b75-85d2-cf41-024bf8e3cf0c" [ 914.588046] env[61545]: _type = "Task" [ 914.588046] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.597936] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ea576-0b75-85d2-cf41-024bf8e3cf0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.730370] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.758s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.730698] env[61545]: INFO nova.compute.manager [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Migrating [ 914.738939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.815s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.739269] env[61545]: DEBUG nova.objects.instance [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lazy-loading 'resources' on Instance uuid 602bd42d-6afa-4419-8352-73a9daab2fe0 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.851827] env[61545]: DEBUG nova.compute.manager [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.852180] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.854169] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a1aeb5-d232-4376-ae8a-0f1703405c3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.865689] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.865969] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9558a9f3-b5d6-4f20-9e09-ceae52f4a499 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.874381] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 914.874381] env[61545]: value = "task-4255979" [ 914.874381] env[61545]: _type = "Task" [ 914.874381] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.887400] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.095207] env[61545]: DEBUG nova.network.neutron [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Updated VIF entry in instance network info cache for port 1e891b2e-5640-4992-b111-00dfc1ad763a. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.095616] env[61545]: DEBUG nova.network.neutron [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Updating instance_info_cache with network_info: [{"id": "1e891b2e-5640-4992-b111-00dfc1ad763a", "address": "fa:16:3e:4d:63:6e", "network": {"id": "d43bb417-a08b-414d-aadd-b9e754ff3612", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1610703633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39f1a55fa71c4ec28278ebd71a4bf4d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e891b2e-56", "ovs_interfaceid": "1e891b2e-5640-4992-b111-00dfc1ad763a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.103317] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527ea576-0b75-85d2-cf41-024bf8e3cf0c, 'name': SearchDatastore_Task, 'duration_secs': 0.011378} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.103688] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.103979] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.105425] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.105616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.105827] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.106154] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ce4ead9-ac2e-4ff7-b721-7ef27bfb3497 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.251178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.251398] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.251622] env[61545]: DEBUG nova.network.neutron [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.274713] 
env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.274713] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.275629] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-793dae60-4d78-44c9-9658-40e8e0b9d54b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.283139] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 915.283139] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cfa5bc-2238-f0f5-5d27-01d8cd3e88b3" [ 915.283139] env[61545]: _type = "Task" [ 915.283139] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.296306] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cfa5bc-2238-f0f5-5d27-01d8cd3e88b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.297143] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52b2cc26-d1e1-435f-91f7-818dbc83e24c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.305583] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 915.305583] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8fcb0-edf9-f2c5-328c-20b2179f9134" [ 915.305583] env[61545]: _type = "Task" [ 915.305583] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.315274] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8fcb0-edf9-f2c5-328c-20b2179f9134, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.388145] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255979, 'name': PowerOffVM_Task, 'duration_secs': 0.200675} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.388445] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.388617] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.388882] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7388c307-3259-4504-9f98-113d001a978a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.487208] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.487457] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.487666] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleting the datastore file [datastore2] 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.487931] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-404b08e1-9012-4f3d-bb58-83dc5760df97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.496618] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 915.496618] env[61545]: value = "task-4255981" [ 915.496618] env[61545]: _type = "Task" [ 915.496618] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.508783] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255981, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.598977] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdbd7c69-cef7-490f-990f-8b51772df0e6 req-0facf425-483e-47b6-85f1-e8cedd365ce4 service nova] Releasing lock "refresh_cache-1537dbf0-d1b6-410f-8333-788761dd24d7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.813682] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf271bc0-1c72-490f-b57c-46ee45ddafe5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.820018] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8fcb0-edf9-f2c5-328c-20b2179f9134, 'name': SearchDatastore_Task, 'duration_secs': 0.012658} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.820790] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.821317] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1537dbf0-d1b6-410f-8333-788761dd24d7/1537dbf0-d1b6-410f-8333-788761dd24d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.821680] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8b48b58-5a3b-4d12-9470-386d52cd5765 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.827141] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c9832a-59bc-4633-9fe1-4bf6645bed26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.831850] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 915.831850] env[61545]: value = "task-4255982" [ 915.831850] env[61545]: _type = "Task" [ 915.831850] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.867625] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe378473-d65e-4f34-93c3-3b3a9d6edd0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.873571] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.879371] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eca2632-a2ee-4194-ba7d-33dc5c5ccd1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.894234] env[61545]: DEBUG nova.compute.provider_tree [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.010933] env[61545]: DEBUG oslo_vmware.api [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4255981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356642} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.011334] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.011559] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.011796] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.012048] env[61545]: INFO nova.compute.manager [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Took 1.16 seconds to destroy the instance on the hypervisor. [ 916.012692] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.012692] env[61545]: DEBUG nova.compute.manager [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.012815] env[61545]: DEBUG nova.network.neutron [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.043374] env[61545]: DEBUG nova.network.neutron [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.344590] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255982, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.400492] env[61545]: DEBUG nova.scheduler.client.report [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.546773] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.581256] env[61545]: DEBUG nova.compute.manager [req-ce94b057-15da-4ea7-bba6-9f735cd0dc3c req-75678c20-bda3-4aaa-992b-321de91e345e service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Received event network-vif-deleted-132b9a92-c7de-4fef-9870-cc4a0a7ae9c2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 916.581681] env[61545]: INFO nova.compute.manager [req-ce94b057-15da-4ea7-bba6-9f735cd0dc3c req-75678c20-bda3-4aaa-992b-321de91e345e service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Neutron deleted interface 132b9a92-c7de-4fef-9870-cc4a0a7ae9c2; detaching it from the instance and deleting it from the info cache [ 916.581681] env[61545]: DEBUG nova.network.neutron [req-ce94b057-15da-4ea7-bba6-9f735cd0dc3c req-75678c20-bda3-4aaa-992b-321de91e345e service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.843340] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670804} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.844031] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1537dbf0-d1b6-410f-8333-788761dd24d7/1537dbf0-d1b6-410f-8333-788761dd24d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.844270] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.844632] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c891e8a-6f5a-41ba-b3a0-72fe9c11a335 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.852178] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 916.852178] env[61545]: value = "task-4255983" [ 916.852178] env[61545]: _type = "Task" [ 916.852178] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.862849] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255983, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.907348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.168s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.910519] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.792s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.911454] env[61545]: DEBUG nova.objects.instance [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lazy-loading 'resources' on Instance uuid e8c954ec-de76-4d3e-9a63-6c30523d5b63 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.928969] env[61545]: DEBUG nova.network.neutron [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.932360] env[61545]: INFO nova.scheduler.client.report [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleted allocations for instance 602bd42d-6afa-4419-8352-73a9daab2fe0 [ 917.085445] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e75b259-ab41-429a-8124-ca44af2e55d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.095718] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4d346b-ea5d-4853-a975-c4a9c706e524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.138015] env[61545]: DEBUG nova.compute.manager [req-ce94b057-15da-4ea7-bba6-9f735cd0dc3c req-75678c20-bda3-4aaa-992b-321de91e345e service nova] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Detach interface failed, port_id=132b9a92-c7de-4fef-9870-cc4a0a7ae9c2, reason: Instance 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 917.363373] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071053} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.363688] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.364533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c3994e-f4ba-4288-9a9d-b210910a5f97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.388765] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 1537dbf0-d1b6-410f-8333-788761dd24d7/1537dbf0-d1b6-410f-8333-788761dd24d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.389549] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8ade429-7daa-48f4-88b2-5cc7f1380f2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.410728] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 917.410728] env[61545]: value = "task-4255984" [ 917.410728] env[61545]: _type = "Task" [ 917.410728] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.424016] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255984, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.432735] env[61545]: INFO nova.compute.manager [-] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Took 1.42 seconds to deallocate network for instance. [ 917.442601] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc922eaf-9d56-4e29-bc9f-f0260b18f499 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "602bd42d-6afa-4419-8352-73a9daab2fe0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.975s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.924942] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255984, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.943678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.955519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e782ec-ef77-4906-b767-817e7d7e4992 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.963591] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4994a3a8-ba20-46e2-86f5-7a6afecace2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.967226] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.967554] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.967654] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "69c59bd5-1f57-4fa2-afab-348e5f57501e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.967831] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.967999] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.970194] env[61545]: INFO nova.compute.manager [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 
69c59bd5-1f57-4fa2-afab-348e5f57501e] Terminating instance [ 917.999964] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "refresh_cache-69c59bd5-1f57-4fa2-afab-348e5f57501e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.000150] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquired lock "refresh_cache-69c59bd5-1f57-4fa2-afab-348e5f57501e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.000287] env[61545]: DEBUG nova.network.neutron [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.001890] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5d174e-2753-4c8a-b7e0-53b117820019 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.011328] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76beb49-7ee8-44b0-87b9-2efc63e8dbc6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.029130] env[61545]: DEBUG nova.compute.provider_tree [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.063014] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc9e580-b2a8-4c9b-ae57-0e0b61669363 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.087060] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.421490] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255984, 'name': ReconfigVM_Task, 'duration_secs': 0.724214} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.421765] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 1537dbf0-d1b6-410f-8333-788761dd24d7/1537dbf0-d1b6-410f-8333-788761dd24d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.422445] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e87fe0b8-5c0a-41b3-afa1-c6e6f1e8688a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.429609] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 918.429609] env[61545]: value = "task-4255985" [ 918.429609] env[61545]: _type = "Task" [ 918.429609] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.438577] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255985, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.525490] env[61545]: DEBUG nova.network.neutron [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.532214] env[61545]: DEBUG nova.scheduler.client.report [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.594642] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.595029] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e861fb0-bec9-4ee5-8f24-415f353d0f9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.603079] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 918.603079] env[61545]: value = "task-4255986" [ 918.603079] env[61545]: _type = "Task" [ 918.603079] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.613344] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.614346] env[61545]: DEBUG nova.network.neutron [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.941028] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255985, 'name': Rename_Task, 'duration_secs': 0.224347} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.941375] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.941628] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9af96752-df0e-4196-8e8a-0d071739af5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.949158] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 918.949158] env[61545]: value = "task-4255987" [ 918.949158] env[61545]: _type = "Task" [ 918.949158] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.958317] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255987, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.038517] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.042303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.523s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.042303] env[61545]: DEBUG nova.objects.instance [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lazy-loading 'resources' on Instance uuid 13db992b-db13-451f-a853-9b7de28b9184 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.064333] env[61545]: INFO nova.scheduler.client.report [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleted allocations for instance e8c954ec-de76-4d3e-9a63-6c30523d5b63 [ 919.114408] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255986, 'name': PowerOffVM_Task, 'duration_secs': 0.358765} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.114693] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.114903] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 919.120107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Releasing lock "refresh_cache-69c59bd5-1f57-4fa2-afab-348e5f57501e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.121222] env[61545]: DEBUG nova.compute.manager [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 919.121517] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.122812] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f97ad8-dc5b-48a3-849a-bcdf0816c06d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.131530] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.131791] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-789d749c-b6ce-4e41-a88b-f123831ad58f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.141091] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 919.141091] env[61545]: value = "task-4255988" [ 919.141091] env[61545]: _type = "Task" [ 919.141091] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.149451] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255988, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.461154] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255987, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.574828] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad68d57a-9fe2-401d-a5c1-744af92527b2 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "e8c954ec-de76-4d3e-9a63-6c30523d5b63" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.705s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa 
tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 919.623154] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 919.623821] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 919.623821] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 919.632033] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-707b3016-7410-48be-bcce-0aa9d4784716 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.652305] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255988, 'name': PowerOffVM_Task, 'duration_secs': 0.141045} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.653836] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.653971] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.654318] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 919.654318] env[61545]: value = "task-4255989" [ 919.654318] env[61545]: _type = "Task" [ 919.654318] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.657146] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e1e5657-b77f-4320-9a65-441981edd9b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.669613] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255989, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.694538] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.695031] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.695174] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleting the datastore file [datastore2] 69c59bd5-1f57-4fa2-afab-348e5f57501e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.695462] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24dc7bec-551f-42e9-89cc-462587a403f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.705632] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for the task: (returnval){ [ 919.705632] env[61545]: value = "task-4255991" [ 919.705632] env[61545]: _type = "Task" [ 919.705632] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.715218] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255991, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.960648] env[61545]: DEBUG oslo_vmware.api [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255987, 'name': PowerOnVM_Task, 'duration_secs': 0.516877} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.962108] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.962108] env[61545]: INFO nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Took 8.34 seconds to spawn the instance on the hypervisor. [ 919.962108] env[61545]: DEBUG nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 919.964924] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa61a32-ac60-4df9-bd8f-6c6a75cec7ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.134563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32c8ac8-c208-4c13-a095-9fbed67c659a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.141932] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16345fb2-5967-4619-8fdc-dfbd7487ab13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.176722] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6903b23e-da5a-47bf-8ba9-399c3a13bf34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.187768] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063220b0-9a6e-4697-8dca-3bbbd5c6e719 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.191817] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255989, 'name': ReconfigVM_Task, 'duration_secs': 0.163664} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.192636] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 920.210170] env[61545]: DEBUG nova.compute.provider_tree [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.223179] env[61545]: DEBUG oslo_vmware.api [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Task: {'id': task-4255991, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145334} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.223179] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.223179] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.223364] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.223390] env[61545]: INFO nova.compute.manager [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Took 1.10 seconds to destroy the instance on the hypervisor. [ 920.223655] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.223871] env[61545]: DEBUG nova.compute.manager [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 920.223980] env[61545]: DEBUG nova.network.neutron [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.254634] env[61545]: DEBUG nova.network.neutron [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.482276] env[61545]: INFO nova.compute.manager [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Took 44.51 seconds to build instance. [ 920.701571] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0ed6a45a-0c2d-43c8-94d3-0da3debac597',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1743994676',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.701830] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.702031] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.702212] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.702365] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.702515] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.702715] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.702871] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.703045] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.703214] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.703384] env[61545]: DEBUG nova.virt.hardware [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.708834] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 920.709164] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4aa65956-3c31-4b9c-b4ea-10a7001a335c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.723041] env[61545]: DEBUG nova.scheduler.client.report [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.733385] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 920.733385] env[61545]: value = "task-4255992" [ 920.733385] env[61545]: _type = "Task" [ 920.733385] env[61545]: } to 
complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.742279] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255992, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.756869] env[61545]: DEBUG nova.network.neutron [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.984588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ce36afc5-5ead-46e4-8e83-526d9448dee5 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.648s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.229773] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.232172] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.128s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.232567] env[61545]: DEBUG nova.objects.instance [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lazy-loading 'resources' on Instance uuid 0554c462-1dc5-4043-94ac-7a3d28ed05e1 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.245845] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255992, 'name': ReconfigVM_Task, 'duration_secs': 0.167429} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.247025] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 921.248050] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c1b10b-b5e3-4aba-84d9-4241f3798dfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.253289] env[61545]: INFO nova.scheduler.client.report [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted allocations for instance 13db992b-db13-451f-a853-9b7de28b9184 [ 921.273729] env[61545]: INFO nova.compute.manager [-] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Took 1.05 seconds to deallocate network for instance. [ 921.285465] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.288302] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24d7e536-09cf-4b8f-a1e1-387ad2633fd8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.321239] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 921.321239] env[61545]: value = "task-4255993" [ 921.321239] env[61545]: _type = "Task" [ 921.321239] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.333792] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255993, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.489861] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 921.789915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-28f399c6-6add-4f54-87a1-ab381c3f08a9 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "13db992b-db13-451f-a853-9b7de28b9184" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.368s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.817400] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.833391] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255993, 'name': ReconfigVM_Task, 'duration_secs': 0.267324} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.833841] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfigured VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.834215] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.008435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.264945] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04dd18c-ca21-49b1-bcd4-81941e40f64c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.272795] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3474ef43-5aa3-4b35-8cb5-e032f43ef15c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.306674] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b9142a-cdc5-450a-a5be-80764626c285 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.315946] env[61545]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0280e96-aa0b-4d9e-8cde-7467404cd786 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.331122] env[61545]: DEBUG nova.compute.provider_tree [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.344645] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8bfb92-3418-4a01-a901-f5b7b6a43ea0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.364147] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bcdc49-acc1-44d1-b874-0a819ffbdd89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.384472] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.834728] env[61545]: DEBUG nova.scheduler.client.report [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.963744] env[61545]: DEBUG nova.network.neutron [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Port fc3b9500-79f7-4be8-a298-f3522507a716 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 923.343659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.112s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.346353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.200s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.347969] env[61545]: INFO nova.compute.claims [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.367994] env[61545]: INFO nova.scheduler.client.report [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Deleted allocations for instance 0554c462-1dc5-4043-94ac-7a3d28ed05e1 [ 923.779974] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "1537dbf0-d1b6-410f-8333-788761dd24d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.780345] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.780562] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.780750] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.780919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.783114] env[61545]: INFO nova.compute.manager [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Terminating instance [ 923.876780] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8b5ac43e-7bb3-4d18-9b08-600f948e26f5 tempest-VolumesAssistedSnapshotsTest-760021604 tempest-VolumesAssistedSnapshotsTest-760021604-project-member] Lock 
"0554c462-1dc5-4043-94ac-7a3d28ed05e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.214s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.987152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.987406] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.987612] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.287564] env[61545]: DEBUG nova.compute.manager [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.287810] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.288797] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2d1ab4-7ff1-486e-9e6a-5df7b29e8802 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.297787] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.298081] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6161811-6b73-4d39-8d6e-363942e8bd77 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.307034] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 924.307034] env[61545]: value = "task-4255994" [ 924.307034] env[61545]: _type = "Task" [ 924.307034] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.321860] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.828792] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255994, 'name': PowerOffVM_Task, 'duration_secs': 0.240566} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.829658] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.830889] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.830889] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a0b055b-0eff-4e57-a608-8fb2532e2453 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.898626] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fde636e-f927-40bf-b731-18fec5ecf399 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.906592] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd52a5d-c618-44b1-92d5-4eb591b84d94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.917796] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.917796] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.917796] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleting the datastore file [datastore2] 1537dbf0-d1b6-410f-8333-788761dd24d7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.941455] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74e9bc72-abe2-4ede-97e6-f92bfb4b6b18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.944619] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b7de4c-4f46-4c53-ac11-2aaefd2b9dc4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.955269] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa1bd5c-6362-43cb-9874-94ddc8fe344e {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.960867] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 924.960867] env[61545]: value = "task-4255996" [ 924.960867] env[61545]: _type = "Task" [ 924.960867] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.977035] env[61545]: DEBUG nova.compute.provider_tree [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.983145] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.029498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.029498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.029498] env[61545]: DEBUG nova.network.neutron [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.478869] env[61545]: DEBUG nova.scheduler.client.report [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.483718] env[61545]: DEBUG oslo_vmware.api [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4255996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167305} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.484362] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.484732] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.485032] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.485304] env[61545]: INFO nova.compute.manager [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Took 1.20 seconds to destroy the instance on the hypervisor. [ 925.485666] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 925.485964] env[61545]: DEBUG nova.compute.manager [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 925.486172] env[61545]: DEBUG nova.network.neutron [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.760122] env[61545]: DEBUG nova.network.neutron [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.956932] env[61545]: DEBUG nova.compute.manager [req-f377aa97-d559-4eec-99c4-68d188fe0a19 req-ac4cb6d0-7dec-4f0f-a92f-52729d393205 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Received event network-vif-deleted-1e891b2e-5640-4992-b111-00dfc1ad763a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 925.957295] env[61545]: INFO nova.compute.manager [req-f377aa97-d559-4eec-99c4-68d188fe0a19 req-ac4cb6d0-7dec-4f0f-a92f-52729d393205 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Neutron deleted interface 1e891b2e-5640-4992-b111-00dfc1ad763a; detaching it from the instance and deleting it from the info cache [ 925.957408] env[61545]: DEBUG nova.network.neutron [req-f377aa97-d559-4eec-99c4-68d188fe0a19 req-ac4cb6d0-7dec-4f0f-a92f-52729d393205 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.985339] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
925.985869] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 925.988749] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.871s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.990824] env[61545]: INFO nova.compute.claims [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.264824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.438813] env[61545]: DEBUG nova.network.neutron [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.460481] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff9be192-54bc-4394-a2f2-65b9f5cbf93e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.474223] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c58ebb-bbd6-406c-8a31-4c0901e8b5ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.499354] env[61545]: DEBUG nova.compute.utils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.517822] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.517822] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.519929] env[61545]: DEBUG nova.compute.manager [req-f377aa97-d559-4eec-99c4-68d188fe0a19 req-ac4cb6d0-7dec-4f0f-a92f-52729d393205 service nova] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Detach interface failed, port_id=1e891b2e-5640-4992-b111-00dfc1ad763a, reason: Instance 1537dbf0-d1b6-410f-8333-788761dd24d7 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 926.603973] env[61545]: DEBUG nova.policy [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9bcc01a701c4b728d810b0b27ce6249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeb51ace7650413b987be7ddd7490182', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.796469] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d30c42-95a3-422f-93d3-4b05d56bc63e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.816127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942fe797-b819-483c-b101-f1bef557341f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.824754] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.942046] env[61545]: INFO nova.compute.manager [-] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Took 1.46 seconds to deallocate network for instance. [ 927.003800] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 927.017984] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Successfully created port: bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.333200] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.336191] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83b01be3-a484-48a9-8e76-60c2883119ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.343991] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 927.343991] env[61545]: value = "task-4255997" [ 927.343991] env[61545]: _type = "Task" [ 927.343991] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.355627] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255997, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.455272] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.620773] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee92a23a-e38a-419d-bc61-6b4f934b6f2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.630655] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5946e5-ff61-4b81-98db-bd5a7af45514 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.668124] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeeb72aa-9b42-4b21-b438-dc426b581226 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.676773] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f2130e-f26a-4635-889e-dac4457b066f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.694681] env[61545]: DEBUG nova.compute.provider_tree [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.855910] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255997, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.022603] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 928.059067] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.059670] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.059670] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.059818] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.059866] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.060019] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.060244] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.060878] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.060878] env[61545]: DEBUG 
nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.060878] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.060878] env[61545]: DEBUG nova.virt.hardware [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.061763] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6fb138-0369-4aa7-bbf6-c7098f579f5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.071075] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924a6ab6-4148-42e6-89eb-e317ea9ea5fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.200154] env[61545]: DEBUG nova.scheduler.client.report [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.362286] env[61545]: DEBUG oslo_vmware.api [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4255997, 'name': PowerOnVM_Task, 'duration_secs': 0.571893} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.362588] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.362790] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-099f4a74-a32b-4e2d-af44-d0eb3ef50caa tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance 'dad53420-37f1-42ef-b0d3-e35c73b97417' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 928.602501] env[61545]: DEBUG nova.compute.manager [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Received event network-vif-plugged-bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 928.602738] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.602977] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.603220] env[61545]: DEBUG oslo_concurrency.lockutils [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.603396] env[61545]: DEBUG nova.compute.manager [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] No waiting events found dispatching network-vif-plugged-bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.603601] env[61545]: WARNING nova.compute.manager [req-d4751248-c3ac-47a5-ba25-2f29e626db9e req-4fb69363-ec06-44ac-b963-2183e08629cd service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Received unexpected event network-vif-plugged-bf240544-0938-4b0c-9469-4d66bd37ee14 for instance with vm_state building and task_state spawning. 
[ 928.699252] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Successfully updated port: bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.705248] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.705248] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 928.707719] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.114s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.710268] env[61545]: INFO nova.compute.claims [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.202868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.203129] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.203219] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.213186] env[61545]: DEBUG nova.compute.utils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 929.214556] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] 
[instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 929.249397] env[61545]: DEBUG nova.compute.manager [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.250399] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11280f3-4669-424b-ae81-8e4c8772d2e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.717903] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 929.754964] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.764659] env[61545]: INFO nova.compute.manager [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] instance snapshotting [ 929.765317] env[61545]: DEBUG nova.objects.instance [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.977497] env[61545]: DEBUG nova.network.neutron [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating instance_info_cache with network_info: [{"id": "bf240544-0938-4b0c-9469-4d66bd37ee14", "address": "fa:16:3e:4e:32:cd", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf240544-09", "ovs_interfaceid": "bf240544-0938-4b0c-9469-4d66bd37ee14", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.272318] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dc240c-c9ec-4276-80d8-e9de9ed07ee8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.305429] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23847f60-2d4f-4a9e-a313-8162a1a76dbe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.339036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfcd996-440e-42fd-a5d6-4dfc6503adb2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.348566] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66cd78-9175-485f-b8a2-a870e33598b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.382718] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b24ca77-bc86-49fe-96f0-28d4b89598f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.391745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ccd437-ed8e-4657-bce1-1ee1493d5caf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.410074] env[61545]: DEBUG nova.compute.provider_tree [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.482659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.483015] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Instance network_info: |[{"id": "bf240544-0938-4b0c-9469-4d66bd37ee14", "address": "fa:16:3e:4e:32:cd", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf240544-09", "ovs_interfaceid": "bf240544-0938-4b0c-9469-4d66bd37ee14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.483947] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:32:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf240544-0938-4b0c-9469-4d66bd37ee14', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.491536] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating folder: Project (aeb51ace7650413b987be7ddd7490182). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.492246] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e5a389a-d9da-4e0e-a1f1-4b0a3d568a64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.506860] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created folder: Project (aeb51ace7650413b987be7ddd7490182) in parent group-v838542. [ 930.506860] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating folder: Instances. Parent ref: group-v838701. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.506860] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-509e462f-8728-4cae-9b95-1d31cc00e27d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.517377] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created folder: Instances in parent group-v838701. [ 930.517632] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.517835] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.518061] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa49598e-cd6f-4e50-a079-9f0217d77435 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.539722] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.539722] env[61545]: value = "task-4256000" [ 930.539722] env[61545]: _type = "Task" [ 930.539722] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.548741] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256000, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.650613] env[61545]: DEBUG nova.compute.manager [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Received event network-changed-bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 930.651198] env[61545]: DEBUG nova.compute.manager [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Refreshing instance network info cache due to event network-changed-bf240544-0938-4b0c-9469-4d66bd37ee14. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 930.651737] env[61545]: DEBUG oslo_concurrency.lockutils [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] Acquiring lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.652435] env[61545]: DEBUG oslo_concurrency.lockutils [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] Acquired lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.652799] env[61545]: DEBUG nova.network.neutron [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Refreshing network info cache for port bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.731182] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 930.773080] env[61545]: DEBUG nova.network.neutron [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Port fc3b9500-79f7-4be8-a298-f3522507a716 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 930.773622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.773622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.773796] env[61545]: DEBUG nova.network.neutron [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.777498] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.777743] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.777895] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.778119] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 930.778340] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.778464] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.778982] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.778982] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.779102] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.779606] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.779606] env[61545]: DEBUG nova.virt.hardware [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.780472] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b55ffc-9ddd-4684-bf9c-8edd70eeb667 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.791400] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de87983-3c29-4c99-be4c-1b3d74fff409 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.806923] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.812755] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Creating folder: Project 
(11f9afdf47dd45719fdf0c1f2685e1b6). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.813188] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24af7510-3e1f-42ed-8dfc-3c30bd04f1f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.819037] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 930.819406] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2cc713bc-5c0b-42e0-8c1b-8f44e020ae49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.827088] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Created folder: Project (11f9afdf47dd45719fdf0c1f2685e1b6) in parent group-v838542. [ 930.827335] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Creating folder: Instances. Parent ref: group-v838704. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.828761] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2cdc527-3d11-4cfc-941d-b9e926a6eb07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.830785] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 930.830785] env[61545]: value = "task-4256002" [ 930.830785] env[61545]: _type = "Task" [ 930.830785] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.840539] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Created folder: Instances in parent group-v838704. [ 930.840929] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.845837] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.846179] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256002, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.846402] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c865fc6-b05c-4539-829f-c2d33a75092a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.864762] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.864762] env[61545]: value = "task-4256004" [ 930.864762] env[61545]: _type = "Task" [ 930.864762] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.873050] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256004, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.913577] env[61545]: DEBUG nova.scheduler.client.report [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.050685] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256000, 'name': CreateVM_Task, 'duration_secs': 0.34974} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.050908] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.051697] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.051903] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.052305] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 931.052599] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a1b5e1-3c1f-48e5-ad16-2730e6b4c2d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.059122] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 931.059122] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5263dcae-f476-59f3-3839-6084d3b6b4bb" [ 931.059122] env[61545]: _type = "Task" [ 931.059122] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.068769] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5263dcae-f476-59f3-3839-6084d3b6b4bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.343312] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256002, 'name': CreateSnapshot_Task, 'duration_secs': 0.485178} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.344095] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 931.345013] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d17ea08-380c-4710-b553-3f6f932688d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.375458] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256004, 'name': CreateVM_Task, 'duration_secs': 0.293243} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.375787] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.376343] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.421100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.421100] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 931.434310] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.563s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.437494] env[61545]: INFO nova.compute.claims [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.572318] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5263dcae-f476-59f3-3839-6084d3b6b4bb, 'name': SearchDatastore_Task, 'duration_secs': 0.012549} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.572318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.572318] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.572871] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.573326] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.573661] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.574072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.574565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 931.574971] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af042caa-e639-4a5f-b5ba-2c4e7bf744df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.577645] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c74bb97-6784-408b-8912-38fe2b7c6858 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.585029] env[61545]: DEBUG oslo_vmware.api [None 
req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 931.585029] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c4fa3-d920-a720-b8b2-50a7f2daf7d6" [ 931.585029] env[61545]: _type = "Task" [ 931.585029] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.590138] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.590513] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.591776] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4bed5e-9294-4dc7-97c4-0f1120901aac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.602591] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c4fa3-d920-a720-b8b2-50a7f2daf7d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.607852] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 931.607852] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ce3c61-01d2-8e25-50f5-a194ccb019ce" [ 931.607852] env[61545]: _type = "Task" [ 931.607852] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.619440] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ce3c61-01d2-8e25-50f5-a194ccb019ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.671379] env[61545]: DEBUG nova.network.neutron [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.823641] env[61545]: DEBUG nova.network.neutron [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updated VIF entry in instance network info cache for port bf240544-0938-4b0c-9469-4d66bd37ee14. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.823995] env[61545]: DEBUG nova.network.neutron [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating instance_info_cache with network_info: [{"id": "bf240544-0938-4b0c-9469-4d66bd37ee14", "address": "fa:16:3e:4e:32:cd", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf240544-09", "ovs_interfaceid": "bf240544-0938-4b0c-9469-4d66bd37ee14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.865546] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 931.865851] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5cc01002-806c-4a0e-9ec7-822136e52794 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.877221] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 931.877221] env[61545]: value = "task-4256005" [ 931.877221] env[61545]: _type = "Task" [ 931.877221] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.887441] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.945102] env[61545]: DEBUG nova.compute.utils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.948590] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 931.948857] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.011623] env[61545]: DEBUG nova.policy [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c294d699abb483f9c63ffea01adf0fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e026364ae074b0b8a6a6ef4a8d841ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.095790] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529c4fa3-d920-a720-b8b2-50a7f2daf7d6, 'name': SearchDatastore_Task, 'duration_secs': 0.023704} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.096023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.096285] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.096501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.119493] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ce3c61-01d2-8e25-50f5-a194ccb019ce, 'name': SearchDatastore_Task, 'duration_secs': 0.020171} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.120510] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02e84e61-65e7-4e77-81df-1a70054b3f7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.127490] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 932.127490] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f160d1-0de5-64da-488b-529bb745518c" [ 932.127490] env[61545]: _type = "Task" [ 932.127490] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.136928] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f160d1-0de5-64da-488b-529bb745518c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.178872] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.327235] env[61545]: DEBUG oslo_concurrency.lockutils [req-e520519f-443f-49ae-93eb-e1e8c2bb42c7 req-82e082d1-b4e1-4a09-88c5-8cd7743c8ea7 service nova] Releasing lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.388202] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.450551] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 932.492135] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Successfully created port: 989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.638801] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f160d1-0de5-64da-488b-529bb745518c, 'name': SearchDatastore_Task, 'duration_secs': 0.019126} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.641797] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.642149] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b1277c3b-cd7b-43be-9eff-640145dde5e5/b1277c3b-cd7b-43be-9eff-640145dde5e5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.642691] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.642898] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.643157] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10ba85a7-b8f6-45c6-bb26-b3278bb911b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.647632] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71352e2a-479f-42cf-a910-15856c372b6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.657048] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 932.657048] env[61545]: value = "task-4256006" [ 932.657048] env[61545]: _type = "Task" [ 932.657048] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.661023] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.661023] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.662262] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af06f918-1fb2-4252-863d-0322d363e55b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.674243] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.675486] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 932.675486] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a3a4cd-eb9b-2378-e102-cf155054d999" [ 932.675486] env[61545]: _type = "Task" [ 932.675486] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.685871] env[61545]: DEBUG nova.compute.manager [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61545) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 932.686331] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.686331] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a3a4cd-eb9b-2378-e102-cf155054d999, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.687209] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edd6be47-8767-4460-a2e5-6e6e7c8cd000 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.699230] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 932.699230] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52142364-e271-7555-7120-58c663fc8e80" [ 932.699230] env[61545]: _type = "Task" [ 932.699230] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.710447] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52142364-e271-7555-7120-58c663fc8e80, 'name': SearchDatastore_Task} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.713394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.713686] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.714193] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9aeaa414-aaf9-4a01-a67f-e97a1a073888 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.721493] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 932.721493] env[61545]: value = "task-4256007" [ 932.721493] env[61545]: _type = "Task" [ 932.721493] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.732775] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.890745] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.040684] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f653e37-6c37-4d31-ae89-2c1b62a15e23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.054625] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d665ce-2418-4a3b-a504-3390d0c27ea4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.090647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a5f088-02c9-4e19-9a79-91f5d36b14e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.099732] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b3a84c-baf3-416d-9b57-f7e3f9cadc07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.120181] env[61545]: DEBUG nova.compute.provider_tree [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.174795] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513611} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.175521] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b1277c3b-cd7b-43be-9eff-640145dde5e5/b1277c3b-cd7b-43be-9eff-640145dde5e5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.175950] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.176404] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21114d73-b411-41ee-a78a-2f524010e2b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.184315] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 933.184315] env[61545]: value = "task-4256008" [ 933.184315] env[61545]: _type = "Task" [ 933.184315] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.198015] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.234020] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256007, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.390974] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.470231] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 933.501555] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 933.501833] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.501992] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 933.502197] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 933.502349] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 933.503330] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 933.503330] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 933.503330] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 933.503330] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 933.503330] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 933.503593] env[61545]: DEBUG nova.virt.hardware [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 933.505279] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8c03fe-2bed-4e96-85ac-14e61ea7aa74 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.515979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9840f1b9-b11c-42bb-8123-cee65e78a65f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.629032] env[61545]: DEBUG nova.scheduler.client.report [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.698079] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154102} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.698530] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 933.699970] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe6e411-25d6-4972-82e2-60ae6609e5c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.724888] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] b1277c3b-cd7b-43be-9eff-640145dde5e5/b1277c3b-cd7b-43be-9eff-640145dde5e5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.725538] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e31504b-a60d-4fbb-bdc6-2d4f3b4ca106 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.749401] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.804117} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.750914] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.751169] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.751511] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 933.751511] env[61545]: value = "task-4256009" [ 933.751511] env[61545]: _type = "Task" [ 933.751511] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.752238] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c57f7a3b-0cf5-4983-a691-a360de8d7e16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.762286] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256009, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.763880] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 933.763880] env[61545]: value = "task-4256010" [ 933.763880] env[61545]: _type = "Task" [ 933.763880] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.773338] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256010, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.892430] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.133374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.133902] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 934.137023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 38.225s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.147819] env[61545]: DEBUG nova.compute.manager [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Received event network-vif-plugged-989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 934.147819] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.147819] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.147819] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.147819] env[61545]: DEBUG nova.compute.manager [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] No waiting events found dispatching network-vif-plugged-989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 934.147819] env[61545]: WARNING nova.compute.manager [req-7e0c888a-69f8-4e1e-b575-72b0016f361e req-4b1e897e-5c5e-4daf-a597-65d990ca7574 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Received unexpected event network-vif-plugged-989b3fc6-0843-488f-9af2-39bb487eb78a for instance with vm_state building and task_state 
spawning. [ 934.243810] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Successfully updated port: 989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.270488] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256009, 'name': ReconfigVM_Task, 'duration_secs': 0.308879} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.275856] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfigured VM instance instance-0000003d to attach disk [datastore2] b1277c3b-cd7b-43be-9eff-640145dde5e5/b1277c3b-cd7b-43be-9eff-640145dde5e5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.276182] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-926b0208-419f-4027-8082-d39566e4e3c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.284016] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088141} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.285417] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.285782] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 934.285782] env[61545]: value = "task-4256011" [ 934.285782] env[61545]: _type = "Task" [ 934.285782] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.286637] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d0c6ac-e935-4543-9f47-88101e209635 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.315487] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.319436] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c06de9ac-917d-4180-b745-449b7f9245f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.335191] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256011, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.342215] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 934.342215] env[61545]: value = "task-4256012" [ 934.342215] env[61545]: _type = "Task" [ 934.342215] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.352930] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256012, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.392108] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256005, 'name': CloneVM_Task, 'duration_secs': 2.042237} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.392108] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created linked-clone VM from snapshot [ 934.393297] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0897ee-bb3d-41f7-862b-2577f6312d7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.402028] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploading image 2416e430-378b-4a56-8d95-29053d0d652b {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 934.433974] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 934.433974] env[61545]: value = "vm-838708" [ 934.433974] env[61545]: _type = "VirtualMachine" [ 934.433974] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 934.433974] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6db96326-c7b7-4541-975d-4328cadd08b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.442485] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease: (returnval){ [ 934.442485] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523622bc-91cf-7400-b174-c38580b90b04" [ 934.442485] env[61545]: _type = "HttpNfcLease" [ 934.442485] env[61545]: } obtained for exporting VM: (result){ [ 934.442485] env[61545]: value = "vm-838708" [ 934.442485] env[61545]: _type = "VirtualMachine" [ 934.442485] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 934.442485] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the lease: (returnval){ [ 934.442485] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523622bc-91cf-7400-b174-c38580b90b04" [ 934.442485] env[61545]: _type = "HttpNfcLease" [ 934.442485] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 934.452026] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.452026] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523622bc-91cf-7400-b174-c38580b90b04" [ 934.452026] env[61545]: _type = "HttpNfcLease" [ 934.452026] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 934.628900] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "665db895-52ce-4e7c-9a78-86db5b695534" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.631019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.640429] env[61545]: DEBUG nova.compute.utils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 934.641841] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 934.642079] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 934.733983] env[61545]: DEBUG nova.policy [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '483a3570e5d8427aa281abd6624fcfb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0f97aab169448c5a0d956b1b33e1ac2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 934.747189] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.747397] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.747617] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.802393] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256011, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.853944] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.957351] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.957351] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523622bc-91cf-7400-b174-c38580b90b04" [ 934.957351] env[61545]: _type = "HttpNfcLease" [ 934.957351] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 934.957351] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 934.957351] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523622bc-91cf-7400-b174-c38580b90b04" [ 934.957351] env[61545]: _type = "HttpNfcLease" [ 934.957351] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 934.958060] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da524b24-8ecb-4431-b600-ca9508aa39a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.972103] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 934.972398] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 935.074296] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2290edfb-60ba-4f21-a704-f497f4dcb3d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.140792] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Successfully created port: 2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.161471] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 935.168039] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance dad53420-37f1-42ef-b0d3-e35c73b97417 as it has an incoming, in-progress migration cba5117b-fb7a-4947-b2b6-06dabedaf661. Migration status is reverting {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 935.170053] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating resource usage from migration cba5117b-fb7a-4947-b2b6-06dabedaf661 [ 935.210057] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 844f01ed-4dae-4e13-9d1c-09a73f413201 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210193] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance eced4107-b99e-479e-b22c-2157320ecf95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210324] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4b29ebc4-d913-447c-bc57-890953cf8d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210481] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d7e25ea6-7076-4ab2-aed6-fe5232c2665d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210761] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210761] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8d838d3b-32ad-4bb2-839e-6bd81c363447 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.210860] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5f4d6338-d1af-4e58-9f76-5e95d51e76f7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 16bc91d0-71c3-4bd9-980b-6574c3fd9335 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e21de424-8121-4e2f-84c2-8096ba8048cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9cf6dd9e-40e9-4df6-9342-2850e0f93d85 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance ced5bde7-07b9-4d07-8b13-49f6fb006eed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 69c59bd5-1f57-4fa2-afab-348e5f57501e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e3742aa7-0b26-41f5-b8c0-9388ef2b7e74 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 62301196-fb8a-45fe-9193-0ad8f7126ab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9b62358e-c834-461c-9954-49f513b0f4ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.212802] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f7a16153-2ef7-4be4-90a2-5ad6616203f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance a84d7a3d-2f7e-459d-94ca-7caa32b7a472 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.212802] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance bea2e59c-02fd-4d6d-8f10-b0e265fa87a2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.213400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 56680678-c844-4dd2-8541-d50de83b22d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.213400] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1537dbf0-d1b6-410f-8333-788761dd24d7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 935.213400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance b1277c3b-cd7b-43be-9eff-640145dde5e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.213400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 19aabdc5-8d2f-4adb-aea0-34ce4482677a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.213400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c2bb4ea0-e9fb-4198-80fa-acfd25fb226d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.213400] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f9c9c447-e676-4143-b329-fb6d71bcd553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 935.289897] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.304245] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256011, 'name': Rename_Task, 'duration_secs': 0.894891} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.304533] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.304791] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-301b4cc5-1b47-43ef-89af-6ba0a87dc366 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.312086] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 935.312086] env[61545]: value = "task-4256014" [ 935.312086] env[61545]: _type = "Task" [ 935.312086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.324215] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.357425] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.470585] env[61545]: DEBUG nova.network.neutron [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.716839] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1e5be92c-d727-4515-9e16-85ade2719455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 935.823766] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256014, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.857498] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256012, 'name': ReconfigVM_Task, 'duration_secs': 1.114936} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.857874] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.858681] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fef9e2f-f016-48e7-a8d0-32be58727599 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.866962] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 935.866962] env[61545]: value = "task-4256015" [ 935.866962] env[61545]: _type = "Task" [ 935.866962] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.875690] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256015, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.973727] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.974683] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Instance network_info: |[{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 935.974683] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:56:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '65497291-07f3-434c-bd42-657a0cb03365', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '989b3fc6-0843-488f-9af2-39bb487eb78a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.982817] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Creating folder: Project (9e026364ae074b0b8a6a6ef4a8d841ba). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 935.983182] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07b12d23-48a4-4408-b1b4-6a78b5787093 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.995240] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Created folder: Project (9e026364ae074b0b8a6a6ef4a8d841ba) in parent group-v838542. [ 935.995472] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Creating folder: Instances. Parent ref: group-v838709. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 935.995792] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f4b2354-10eb-4797-a609-cba237ecc4c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.007854] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Created folder: Instances in parent group-v838709. [ 936.008312] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.008467] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.008769] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d7df079-5caa-4a44-8eea-424d7b2ad32a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.030770] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.030770] env[61545]: value = "task-4256018" [ 936.030770] env[61545]: _type = "Task" [ 936.030770] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.041073] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256018, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.188019] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 936.221490] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.221612] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.221720] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.221875] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.222336] env[61545]: DEBUG 
nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.222565] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.222719] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.222919] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.223055] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.223761] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.224042] env[61545]: DEBUG nova.virt.hardware [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.225994] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8ab168cb-b0a9-403c-bdb5-b96c6d319baf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 936.228371] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f56e1b-9111-43af-8a46-5d18263c8c4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.240323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7409902-74db-4015-a032-eee0aa3b8343 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.249215] env[61545]: DEBUG nova.compute.manager [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Received event network-changed-989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 936.249442] env[61545]: DEBUG nova.compute.manager [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Refreshing instance network info cache due to event network-changed-989b3fc6-0843-488f-9af2-39bb487eb78a. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 936.249677] env[61545]: DEBUG oslo_concurrency.lockutils [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] Acquiring lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.249901] env[61545]: DEBUG oslo_concurrency.lockutils [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] Acquired lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.250185] env[61545]: DEBUG nova.network.neutron [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Refreshing network info cache for port 989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.328615] env[61545]: DEBUG oslo_vmware.api [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256014, 'name': PowerOnVM_Task, 'duration_secs': 0.514856} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.329599] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.329923] env[61545]: INFO nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Took 8.31 seconds to spawn the instance on the hypervisor. 
[ 936.330191] env[61545]: DEBUG nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.331912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb0d4ac-4d31-4f94-9f72-1ec13cd7a879 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.377569] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256015, 'name': Rename_Task, 'duration_secs': 0.244321} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.380453] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.380453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04f21c05-53f7-4366-bfe6-3863e2ffa28a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.386877] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 936.386877] env[61545]: value = "task-4256019" [ 936.386877] env[61545]: _type = "Task" [ 936.386877] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.398287] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.543952] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256018, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.734741] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 936.734894] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration cba5117b-fb7a-4947-b2b6-06dabedaf661 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 936.862982] env[61545]: INFO nova.compute.manager [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Took 53.74 seconds to build instance. [ 936.906405] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256019, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.026553] env[61545]: DEBUG nova.network.neutron [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updated VIF entry in instance network info cache for port 989b3fc6-0843-488f-9af2-39bb487eb78a. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.027076] env[61545]: DEBUG nova.network.neutron [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.043372] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256018, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.199993] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Successfully updated port: 2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.240104] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 73df6df0-ead6-49cd-8b0a-5e95acfc7e15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 937.240317] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance dad53420-37f1-42ef-b0d3-e35c73b97417 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 937.364843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5ac858b1-a5c9-41e4-a49b-5ee50555e312 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.396s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.401056] env[61545]: DEBUG oslo_vmware.api [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256019, 'name': PowerOnVM_Task, 'duration_secs': 0.742142} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.401056] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.401268] env[61545]: INFO nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Took 6.67 seconds to spawn the instance on the hypervisor. 
[ 937.401446] env[61545]: DEBUG nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.402325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856d7616-4d1a-4d72-b0d7-9134e2a794dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.530459] env[61545]: DEBUG oslo_concurrency.lockutils [req-e46b1d32-4a92-44f0-9069-9e9b126312e5 req-d160c7e5-8558-4a1a-b59b-c9cbb309381f service nova] Releasing lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.542879] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256018, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.702674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.703072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.703072] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.745186] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 578ce929-99fd-47ae-8275-e4ac9abe8d49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 937.869093] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.924114] env[61545]: INFO nova.compute.manager [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Took 52.83 seconds to build instance. 
[ 938.044530] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256018, 'name': CreateVM_Task, 'duration_secs': 1.55184} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.044783] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.045741] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.045832] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.049049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 938.049049] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b69bfbf8-5a60-4001-a50e-6578d8e09f1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.052727] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 938.052727] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a85cb1-1854-9289-832f-37267119f3d6" [ 938.052727] env[61545]: _type = "Task" [ 938.052727] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.062385] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a85cb1-1854-9289-832f-37267119f3d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.248477] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5ba53915-ab57-493e-b2e1-7f3d1b3845ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 938.279156] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.346836] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 938.347427] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.347773] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.348141] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.348276] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] No waiting events found dispatching network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 938.348501] env[61545]: WARNING nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received unexpected event network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c for instance with vm_state building and task_state spawning. [ 938.348743] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 938.348952] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing instance network info cache due to event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 938.352648] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.402260] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.426641] env[61545]: DEBUG oslo_concurrency.lockutils [None req-db7df31f-94eb-4efe-b822-e766af6c8be8 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.791s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.567619] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a85cb1-1854-9289-832f-37267119f3d6, 'name': SearchDatastore_Task, 'duration_secs': 0.013079} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.567913] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.568176] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.568437] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.568588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.568760] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 
tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.569103] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89cd68fd-aa1b-4323-bc20-4e3ef1d24b18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.579147] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.579436] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.580492] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59f81f20-bedf-4daa-aac0-2e5eb2790af5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.588626] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 938.588626] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d2e757-8a1d-4b2d-1ec0-41211bfa6964" [ 938.588626] env[61545]: _type = "Task" [ 938.588626] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.600058] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d2e757-8a1d-4b2d-1ec0-41211bfa6964, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.751750] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d980f421-03b5-4b0e-b547-a33031356d55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 938.781715] env[61545]: DEBUG nova.network.neutron [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.930922] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 939.103212] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d2e757-8a1d-4b2d-1ec0-41211bfa6964, 'name': SearchDatastore_Task, 'duration_secs': 0.013716} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.108115] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-563b6432-4315-4166-84ec-bfbaa2e6d3f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.115984] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 939.115984] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ab69e7-52d0-867b-bd14-abf4c5d7b27d" [ 939.115984] env[61545]: _type = "Task" [ 939.115984] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.127163] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ab69e7-52d0-867b-bd14-abf4c5d7b27d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.263123] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 665db895-52ce-4e7c-9a78-86db5b695534 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 939.263270] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 939.263482] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4480MB phys_disk=250GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 939.288482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.288482] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance network_info: |[{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 939.288482] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.288482] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.289033] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:07:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '557aba95-8968-407a-bac2-2fae66f7c8e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2147b830-281d-4a24-90d1-22eccefc4c5c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.301233] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating folder: Project (a0f97aab169448c5a0d956b1b33e1ac2). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.305283] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67bd1c24-b828-4948-bccc-74cbe20d9dc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.318174] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created folder: Project (a0f97aab169448c5a0d956b1b33e1ac2) in parent group-v838542. [ 939.318174] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating folder: Instances. Parent ref: group-v838712. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.318352] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07432b3c-f156-44dd-9bca-20490a55917a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.330642] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created folder: Instances in parent group-v838712. [ 939.331025] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.331274] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.331535] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-616139d2-73ae-43ea-ad3f-243f894ee447 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.359560] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.359560] env[61545]: value = "task-4256022" [ 939.359560] env[61545]: _type = "Task" [ 939.359560] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.374761] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256022, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.454701] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.612486] env[61545]: INFO nova.compute.manager [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Rebuilding instance [ 939.627464] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ab69e7-52d0-867b-bd14-abf4c5d7b27d, 'name': SearchDatastore_Task, 'duration_secs': 0.016651} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.630406] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.630693] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c2bb4ea0-e9fb-4198-80fa-acfd25fb226d/c2bb4ea0-e9fb-4198-80fa-acfd25fb226d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.631500] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c111cf82-0421-41c3-a159-aeedd884fd96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.648228] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 939.648228] env[61545]: value = "task-4256023" [ 939.648228] env[61545]: _type = "Task" [ 939.648228] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.657093] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.666207] env[61545]: DEBUG nova.compute.manager [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.667397] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ada0a0-205c-45c2-a899-1988de1bc641 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.846944] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2b8d17-ac69-4366-a1ca-b9bfd208445b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.856084] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2a7333-d448-473a-a493-0137a9c94bcd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.892677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b299f3-0a32-4010-84cc-8e3a28e64ca6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.901244] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256022, 'name': CreateVM_Task, 'duration_secs': 0.456915} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.902198] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.902904] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.903105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.903546] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 939.906039] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e2f6b66-0ab0-4664-8657-5539ff74a47c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.909198] env[61545]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be20a1dd-0922-48ae-a94f-9e9da5d63996 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.917589] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 939.917589] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526373ce-2f28-4f63-2557-b9ca0f04eb99" [ 939.917589] env[61545]: _type = "Task" [ 939.917589] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.926234] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.939896] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526373ce-2f28-4f63-2557-b9ca0f04eb99, 'name': SearchDatastore_Task, 'duration_secs': 0.016105} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.940918] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.941219] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.941491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.941658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.941840] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.942410] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67080cbf-a6d6-426a-b289-38af5705eeaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.964124] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.964328] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.965289] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec53d122-d275-47c6-bb58-6d5ff9c505a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.972593] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 939.972593] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed521e-45b3-5614-4201-132333823c77" [ 939.972593] env[61545]: _type = "Task" [ 939.972593] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.983102] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed521e-45b3-5614-4201-132333823c77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.145557] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updated VIF entry in instance network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.145557] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.159968] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256023, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.434442] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.484076] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed521e-45b3-5614-4201-132333823c77, 'name': SearchDatastore_Task, 'duration_secs': 0.074722} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.485028] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-356b1a70-56fc-4973-9968-f6448c0aaaef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.491584] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 940.491584] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291fb64-a00a-640e-1ab2-58af0150bbf0" [ 940.491584] env[61545]: _type = "Task" [ 940.491584] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.500350] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291fb64-a00a-640e-1ab2-58af0150bbf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.648303] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.648528] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Received event network-changed-bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 940.648791] env[61545]: DEBUG nova.compute.manager [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Refreshing instance network info cache due to event network-changed-bf240544-0938-4b0c-9469-4d66bd37ee14. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 940.649046] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Acquiring lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.649326] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Acquired lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.649486] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Refreshing network info cache for port bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.661606] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730499} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.662549] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c2bb4ea0-e9fb-4198-80fa-acfd25fb226d/c2bb4ea0-e9fb-4198-80fa-acfd25fb226d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.662894] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.663064] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52afbef8-3893-43b6-9b10-f9ba5d139387 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.672687] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 940.672687] env[61545]: value = "task-4256024" [ 940.672687] env[61545]: _type = "Task" [ 940.672687] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.683204] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.683559] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.683837] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2e6a1ef-986d-48b0-b137-29d561d204f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.691811] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 940.691811] env[61545]: value = "task-4256025" [ 940.691811] env[61545]: _type = "Task" [ 940.691811] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.701443] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256025, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.939456] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 940.939736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.803s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.940047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.845s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.940272] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.940443] env[61545]: INFO nova.compute.manager [None req-8ca9bc82-9687-4839-b5f4-4890014d6e7a tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Successfully reverted task state from rebuilding on failure for instance. 
[ 940.946065] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.919s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.947977] env[61545]: INFO nova.compute.claims [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.950689] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.950840] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Cleaning up deleted instances {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11865}} [ 941.005522] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291fb64-a00a-640e-1ab2-58af0150bbf0, 'name': SearchDatastore_Task, 'duration_secs': 0.018961} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.005874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.006343] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.006588] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bb285ad-c8b1-41d3-88d9-9f46886bdf3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.014045] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 941.014045] env[61545]: value = "task-4256026" [ 941.014045] env[61545]: _type = "Task" [ 941.014045] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.022968] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256026, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.183006] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089631} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.183404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.184262] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce4185e-6031-45e2-925c-7c7b5e2db895 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.213323] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] c2bb4ea0-e9fb-4198-80fa-acfd25fb226d/c2bb4ea0-e9fb-4198-80fa-acfd25fb226d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.219470] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09566db4-6337-4a2b-ab23-7d5f71dcae03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.244169] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256025, 'name': PowerOffVM_Task, 'duration_secs': 0.230815} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.245654] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.245939] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.246346] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 941.246346] env[61545]: value = "task-4256027" [ 941.246346] env[61545]: _type = "Task" [ 941.246346] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.247212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01817fc-f115-43f3-bf45-ddb88135e950 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.259634] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256027, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.262235] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.262571] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cabd96d7-6451-4c61-8f13-1dea3077da42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.294189] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.294452] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.294641] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Deleting the datastore file [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.294982] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55f0f0ce-8cac-42ca-a59e-43c89f1ae04d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.302838] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 941.302838] env[61545]: value = "task-4256029" [ 941.302838] env[61545]: _type = "Task" [ 941.302838] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.311731] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256029, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.466943] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] There are 35 instances to clean {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11874}} [ 941.467333] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: ecf98c79-da3d-44be-9c76-c3fccc688235] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 941.528456] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256026, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.641297] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updated VIF entry in instance network info cache for port bf240544-0938-4b0c-9469-4d66bd37ee14. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.641297] env[61545]: DEBUG nova.network.neutron [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating instance_info_cache with network_info: [{"id": "bf240544-0938-4b0c-9469-4d66bd37ee14", "address": "fa:16:3e:4e:32:cd", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf240544-09", "ovs_interfaceid": "bf240544-0938-4b0c-9469-4d66bd37ee14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.763479] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256027, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.814885] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431242} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.815314] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.815642] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.815850] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.974400] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 602bd42d-6afa-4419-8352-73a9daab2fe0] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 942.026614] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256026, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.796684} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.029430] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 942.030439] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 942.030984] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b77aa5b-a034-46b4-aded-403a2c0f4fe4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.039533] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 942.039533] env[61545]: value = "task-4256030" [ 942.039533] env[61545]: _type = "Task" [ 942.039533] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.054493] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.143732] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcaa1cc6-aa2e-4563-9136-8c1470d36772 req-e54349ed-a881-4c9b-a821-5292a758ce27 service nova] Releasing lock "refresh_cache-b1277c3b-cd7b-43be-9eff-640145dde5e5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.261305] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256027, 'name': ReconfigVM_Task, 'duration_secs': 0.784604} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.264234] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfigured VM instance instance-0000003f to attach disk [datastore2] c2bb4ea0-e9fb-4198-80fa-acfd25fb226d/c2bb4ea0-e9fb-4198-80fa-acfd25fb226d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.265156] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d771954c-8fc2-45f0-9290-8392d3b7d5d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.273025] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 942.273025] env[61545]: value = "task-4256031" [ 942.273025] env[61545]: _type = "Task" [ 942.273025] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.285913] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256031, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.462081] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4d1557-817e-48c0-9787-8933d4bd4a1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.472270] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d7d76e-6f2a-41b5-afd6-eb04a59d6f34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.478392] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 60edf62d-3fb8-4d85-9a4e-ef71c565d940] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 942.507409] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 13db992b-db13-451f-a853-9b7de28b9184] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 942.509844] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd7de69-e14d-4786-9e06-c00cdfbda0ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.519765] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93477ce2-94ce-46e4-9a24-d5ebec2f70e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.535404] env[61545]: DEBUG nova.compute.provider_tree [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 
tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.550493] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095781} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.550665] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 942.551507] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f766e2b4-c573-46b3-b1a5-5a1dd91bc0fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.575277] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 942.575666] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77739083-3d51-4451-933a-a042eda4f49a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.597428] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 942.597428] env[61545]: value = "task-4256032" [ 942.597428] env[61545]: _type = "Task" [ 942.597428] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.608606] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.784183] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256031, 'name': Rename_Task, 'duration_secs': 0.261483} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.784466] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.784715] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8503948c-eff8-46ab-af7a-26cc16d81bb8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.792073] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 942.792073] env[61545]: value = "task-4256033" [ 942.792073] env[61545]: _type = "Task" [ 942.792073] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.801014] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.853144] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.853422] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.853575] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.853754] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.853899] env[61545]: DEBUG nova.virt.hardware 
[None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.854150] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.854434] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.854599] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.854769] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.854978] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.855118] env[61545]: DEBUG nova.virt.hardware [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.856018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbca4d74-36a1-4630-93fa-16d8c6b3d4a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.865129] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674c96a5-4dee-4a0d-ad31-b9640a2007ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.880775] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.887010] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.887394] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.887652] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5398355d-b56e-4aa0-9ae2-f29238fd2484 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.905135] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.905135] env[61545]: value = "task-4256034" [ 942.905135] env[61545]: _type = "Task" [ 942.905135] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.914161] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256034, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.014437] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 6f2a4514-4de9-427d-91be-f445235696bf] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 943.038695] env[61545]: DEBUG nova.scheduler.client.report [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.108383] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256032, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.302745] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.416162] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256034, 'name': CreateVM_Task, 'duration_secs': 0.48655} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.416368] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 943.416845] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.417014] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.417404] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 943.417682] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96f61bce-de70-4f2b-99b2-46d156431452 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.423340] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 943.423340] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525b6568-bfe9-3f29-ab54-e80db8f461eb" [ 943.423340] env[61545]: _type = "Task" [ 943.423340] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.431796] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525b6568-bfe9-3f29-ab54-e80db8f461eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.518779] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 609ba431-b42b-4b0d-9c16-06e19bee114c] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 943.545797] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.546725] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 943.551059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.835s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.553136] env[61545]: INFO nova.compute.claims [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.612589] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256032, 'name': ReconfigVM_Task, 'duration_secs': 0.520608} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.612944] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Reconfigured VM instance instance-00000040 to attach disk [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.613705] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93cc4647-d02d-4b16-a452-dbb14636781a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.621478] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 943.621478] env[61545]: value = "task-4256035" [ 943.621478] env[61545]: _type = "Task" [ 943.621478] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.631105] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256035, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.805844] env[61545]: DEBUG oslo_vmware.api [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256033, 'name': PowerOnVM_Task, 'duration_secs': 0.812548} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.806163] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.806376] env[61545]: INFO nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Took 10.34 seconds to spawn the instance on the hypervisor. [ 943.806666] env[61545]: DEBUG nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.807632] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c7564a-ef2e-426f-bb4b-d610072c6e6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.935621] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525b6568-bfe9-3f29-ab54-e80db8f461eb, 'name': SearchDatastore_Task, 'duration_secs': 0.030105} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.936016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.936294] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.936575] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.936740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.936942] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.937285] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5146e1dd-f591-4f6d-b99f-18c06838dbdb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.948545] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.948756] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.949721] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed3420ae-0755-4d26-97da-fdafc7890f91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.956368] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 943.956368] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d27efe-c5ff-6652-f650-ec0658a2d478" [ 943.956368] env[61545]: _type = "Task" [ 943.956368] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.966403] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d27efe-c5ff-6652-f650-ec0658a2d478, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.022742] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5b2fb040-a964-479f-ae3f-4f428248d64b] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 944.058555] env[61545]: DEBUG nova.compute.utils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 944.062583] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 944.062780] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.137047] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256035, 'name': Rename_Task, 'duration_secs': 0.318487} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.137605] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.138224] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3a74fa6-d429-44b3-8b90-ff72658268c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.146853] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 944.146853] env[61545]: value = "task-4256036" [ 944.146853] env[61545]: _type = "Task" [ 944.146853] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.156833] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.180269] env[61545]: DEBUG nova.policy [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7580d3a6f9cf4799af863e85f35b0ea9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c33b968c2bbc431686e949fdf795fa76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.330804] env[61545]: INFO nova.compute.manager [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Took 58.76 seconds to build instance. [ 944.468998] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d27efe-c5ff-6652-f650-ec0658a2d478, 'name': SearchDatastore_Task, 'duration_secs': 0.019636} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.470034] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84fe9d46-2d88-43f0-a84a-2238eefae6d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.477293] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 944.477293] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52594f69-35fc-5c04-8c82-3aebeb5848be" [ 944.477293] env[61545]: _type = "Task" [ 944.477293] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.488723] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52594f69-35fc-5c04-8c82-3aebeb5848be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.526442] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: d517f427-8580-481b-b50f-150da6c571b9] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 944.564117] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.653487] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 944.654526] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b246b96d-d3a0-4a07-b220-137040363152 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.664332] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 944.664509] env[61545]: ERROR oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk due to incomplete transfer. 
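The recurring "Invoking <Something>_Task ... / Waiting for the task: ... / Task: {...} progress is N%" triplets in this log (PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ...) are the standard oslo.vmware invoke-then-poll pattern. A minimal sketch of that pattern follows; the vCenter host, credentials and the 'vm-1234' moref value are placeholders, and the constructor argument names are given from memory of the oslo.vmware API rather than taken from this log.

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; a real nova-compute reads these from nova.conf.
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'password',
    api_retry_count=10,
    task_poll_interval=0.5)   # how often the task is polled, i.e. the "progress is N%" lines

# Build a managed object reference for an existing VM (the moref id is a placeholder).
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# Start the asynchronous vSphere call, then block until the task finishes;
# wait_for_task() is what produces the wait_for_task/_poll_task debug records above
# and raises an exception if the vCenter task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)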
[ 944.669172] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4d024237-f0c3-41eb-a408-658722b6827c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.669819] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.677795] env[61545]: DEBUG oslo_vmware.rw_handles [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263c076-1c24-d903-a397-d8b97f5af0f5/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 944.678086] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploaded image 2416e430-378b-4a56-8d95-29053d0d652b to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 944.680922] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 944.683813] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7d96cb27-5153-4db1-aad1-c996b907d7bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.691605] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 944.691605] env[61545]: value = "task-4256037" [ 944.691605] env[61545]: _type = "Task" [ 944.691605] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.704823] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256037, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.837541] env[61545]: DEBUG oslo_concurrency.lockutils [None req-110f9e11-5849-4352-82c5-340e50f2d8c6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.015s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.998139] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52594f69-35fc-5c04-8c82-3aebeb5848be, 'name': SearchDatastore_Task, 'duration_secs': 0.023645} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.998470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.998737] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.999021] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16e308bd-1415-4500-8578-f7daf3368a2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.007594] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 945.007594] env[61545]: value = "task-4256038" [ 945.007594] env[61545]: _type = "Task" [ 945.007594] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.018324] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256038, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.033832] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: fa08b76f-d64d-46e9-9865-1ab2e9b1d823] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 945.070061] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Successfully created port: 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.160120] env[61545]: DEBUG oslo_vmware.api [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256036, 'name': PowerOnVM_Task, 'duration_secs': 0.933344} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.163414] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.163637] env[61545]: INFO nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Took 8.98 seconds to spawn the instance on the hypervisor. [ 945.163835] env[61545]: DEBUG nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.169018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f54a25c-fc3a-4150-9efa-61389d8450e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.208390] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256037, 'name': Destroy_Task} progress is 33%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.215288] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8a4100-d621-49da-91d8-e94806bedec6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.225220] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adc9b14-2b6d-4481-9b97-c827faca6f8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.259875] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354f3dab-8e55-4fd6-b18e-e2876f4d3f05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.268542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c81d15c-df3f-4a6c-bdc2-b4ee40bb2cf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.288889] env[61545]: DEBUG nova.compute.provider_tree [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.519195] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256038, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.537090] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: a127cc27-7155-4a7a-871a-c3e67a99bfc8] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 945.541785] env[61545]: DEBUG nova.compute.manager [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Received event network-changed-989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 945.541988] env[61545]: DEBUG nova.compute.manager [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Refreshing instance network info cache due to event network-changed-989b3fc6-0843-488f-9af2-39bb487eb78a. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 945.542231] env[61545]: DEBUG oslo_concurrency.lockutils [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] Acquiring lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.542395] env[61545]: DEBUG oslo_concurrency.lockutils [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] Acquired lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.542589] env[61545]: DEBUG nova.network.neutron [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Refreshing network info cache for port 989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.584040] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 945.624741] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.625115] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.625359] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.625633] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.625860] env[61545]: 
DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.626094] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.626433] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.626659] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.626905] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.627211] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.627436] env[61545]: DEBUG nova.virt.hardware [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.629726] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fcab9c-0275-40dc-bcc3-cba67c26772c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.641849] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa5be39-b251-4fa8-a9a9-8beadceebdce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.691354] env[61545]: INFO nova.compute.manager [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Took 54.85 seconds to build instance. 
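The CPU-topology records above ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show Nova enumerating every sockets/cores/threads split of the flavor's vCPU count that fits within the 65536-per-dimension limits and then sorting the candidates by preference. A self-contained, simplified sketch of that enumeration is below; it is not Nova's actual nova.virt.hardware code, which additionally handles NUMA and image/flavor preferences.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) split whose product equals vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

# For the m1.nano flavor in this log (vcpus=1) the only candidate is 1:1:1,
# matching the single "Sorted desired topologies" entry recorded above.
print(list(possible_topologies(1)))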
[ 945.703421] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256037, 'name': Destroy_Task, 'duration_secs': 0.790485} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.703662] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroyed the VM [ 945.703907] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 945.704233] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b48acce0-a90c-4847-bd6c-63a833c11d3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.711722] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 945.711722] env[61545]: value = "task-4256039" [ 945.711722] env[61545]: _type = "Task" [ 945.711722] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.722628] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256039, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.792987] env[61545]: DEBUG nova.scheduler.client.report [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.019012] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601359} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.019306] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 946.019548] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 946.019947] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bd1c12b-bb1b-4c6c-af22-a84475148fe1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.027340] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 946.027340] env[61545]: value = "task-4256040" [ 946.027340] env[61545]: _type = "Task" [ 946.027340] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.039699] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.044927] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 7d2bad05-c461-43b9-9dd0-bdefbd33e3a2] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 946.193080] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9f2cae1f-b6e8-4d01-acb0-c71f46943bce tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.224s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.225607] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256039, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.283508] env[61545]: DEBUG nova.network.neutron [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updated VIF entry in instance network info cache for port 989b3fc6-0843-488f-9af2-39bb487eb78a. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 946.283886] env[61545]: DEBUG nova.network.neutron [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.301320] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.301890] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 946.308622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.237s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.308837] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.311051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.963s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.312670] env[61545]: INFO nova.compute.claims [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.538082] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081214} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.538366] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 946.539202] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3c1ba4-241e-4534-9a65-56e7e3809657 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.552212] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 2a0576f9-d740-4dfa-9783-17eb3987840b] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 946.564792] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.565048] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399265f8-7261-47ee-aeb3-807be5f47b94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.588639] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 946.588639] env[61545]: value = "task-4256041" [ 946.588639] env[61545]: _type = "Task" [ 946.588639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.598424] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.726038] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256039, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.789074] env[61545]: DEBUG oslo_concurrency.lockutils [req-535f32be-096c-4615-beea-8153cb9e21f2 req-b637fa06-da0f-4588-81d9-ede86dec8d29 service nova] Releasing lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.826197] env[61545]: DEBUG nova.compute.utils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.827779] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.827952] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.842177] env[61545]: DEBUG oslo_concurrency.lockutils [None req-53908c30-200b-4fd0-a59b-65e606773ba6 tempest-ServerActionsV293TestJSON-1352632226 tempest-ServerActionsV293TestJSON-1352632226-project-member] Lock "26e339f1-182b-4f00-b7c2-a2a32e942d04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.580s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.892488] env[61545]: DEBUG nova.policy [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6b70f49819d45aaa5fa2b56cb8cd3e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f29228f7495747ca97b16aa485960e14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 947.067611] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1722d63d-e604-44fe-8198-13e6c5bce016] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 947.101800] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256041, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.190209] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Successfully created port: 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.226241] env[61545]: DEBUG oslo_vmware.api [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256039, 'name': RemoveSnapshot_Task, 'duration_secs': 1.2565} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.226557] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 947.226870] env[61545]: INFO nova.compute.manager [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 16.96 seconds to snapshot the instance on the hypervisor. [ 947.336541] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 947.372545] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Successfully updated port: 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.390896] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "fff833ad-55af-4702-859b-05f94cac18c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.390896] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.571075] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5a284df5-88ea-43bf-9944-ef344f99591c] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 947.577538] env[61545]: DEBUG nova.compute.manager [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-vif-plugged-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 947.577918] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.578086] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Lock "1e5be92c-d727-4515-9e16-85ade2719455-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.578409] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Lock "1e5be92c-d727-4515-9e16-85ade2719455-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.579468] env[61545]: DEBUG nova.compute.manager [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] No waiting events found dispatching 
network-vif-plugged-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.579468] env[61545]: WARNING nova.compute.manager [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received unexpected event network-vif-plugged-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f for instance with vm_state building and task_state spawning. [ 947.579468] env[61545]: DEBUG nova.compute.manager [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 947.579727] env[61545]: DEBUG nova.compute.manager [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing instance network info cache due to event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 947.579815] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.579932] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.580105] env[61545]: DEBUG nova.network.neutron [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.602203] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256041, 'name': ReconfigVM_Task, 'duration_secs': 0.742376} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.603167] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a/19aabdc5-8d2f-4adb-aea0-34ce4482677a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.603681] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa12bc77-fdfa-45f8-9ef3-e7fdd18efa9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.614721] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 947.614721] env[61545]: value = "task-4256042" [ 947.614721] env[61545]: _type = "Task" [ 947.614721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.625426] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256042, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.785179] env[61545]: DEBUG nova.compute.manager [None req-1129b6d5-0503-466b-aa7f-9ac461014d4c tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Found 1 images (rotation: 2) {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 947.842974] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c57cc8c-04a3-4d85-8223-088e620c1032 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.852018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c8e4d2-da34-4d1d-a94b-1024cb2edde9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.885105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.886078] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd0e54e-8c59-4607-9312-747d74d1d2ee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.895831] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.901039] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b320fa66-05bd-42de-9d15-8536abe6e9bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.919407] env[61545]: DEBUG nova.compute.provider_tree [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.085660] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 79ba6f70-c967-4abf-a2a7-c70046a2602d] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 948.125710] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256042, 'name': Rename_Task, 'duration_secs': 0.202006} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.126750] env[61545]: DEBUG nova.network.neutron [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.128768] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.129416] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-577e2984-97bd-4146-8931-d058768c3496 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.137405] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 948.137405] env[61545]: value = "task-4256043" [ 948.137405] env[61545]: _type = "Task" [ 948.137405] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.147138] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256043, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.254301] env[61545]: DEBUG nova.network.neutron [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.348040] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.371833] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.372259] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.372259] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.372401] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.372525] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.372668] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 948.372918] env[61545]: DEBUG nova.virt.hardware [None 
req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.373056] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.373212] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.373370] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.373537] env[61545]: DEBUG nova.virt.hardware [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.374574] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb17636a-4401-43d5-8559-6d6c33df521e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.384021] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c11bcc-6888-4efe-92db-6941d1463a28 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.423255] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.424164] env[61545]: DEBUG nova.scheduler.client.report [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.595048] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 
72656070-cfd0-4104-a9c7-ec20c5a6238a] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 948.641006] env[61545]: DEBUG nova.compute.manager [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received event network-vif-plugged-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 948.641247] env[61545]: DEBUG oslo_concurrency.lockutils [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] Acquiring lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.641527] env[61545]: DEBUG oslo_concurrency.lockutils [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.641663] env[61545]: DEBUG oslo_concurrency.lockutils [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.641778] env[61545]: DEBUG nova.compute.manager [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] No waiting events found dispatching network-vif-plugged-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.641926] env[61545]: WARNING nova.compute.manager [req-7d9094fe-794c-4c3f-a4a1-49f13b01e6b6 req-c93857f9-a715-4a4a-b35d-d896afe55866 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received unexpected event network-vif-plugged-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 for instance with vm_state building and task_state spawning. [ 948.652222] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256043, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.732672] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Successfully updated port: 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.756493] env[61545]: DEBUG oslo_concurrency.lockutils [req-1ad7c9a1-6c34-400a-a4d4-938de70f635b req-23ad8944-7618-46a9-b245-968c200550eb service nova] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.756877] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.757454] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.929384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.929947] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 948.932765] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.825s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.932970] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.935228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.889s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.937485] env[61545]: INFO nova.compute.claims [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.943612] env[61545]: DEBUG nova.compute.manager [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.944601] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91304382-e634-4e45-b99b-5d432f80a2f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.964365] env[61545]: INFO nova.scheduler.client.report [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleted allocations for instance e3742aa7-0b26-41f5-b8c0-9388ef2b7e74 [ 949.099047] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 6a6c77f1-39ac-46ad-aa3c-82ed3cc70b7f] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 949.149747] env[61545]: DEBUG oslo_vmware.api [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256043, 'name': PowerOnVM_Task, 'duration_secs': 0.977766} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.150018] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.150245] env[61545]: DEBUG nova.compute.manager [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.151015] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb31303-c87f-470b-922d-fb0e4c7a722a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.237063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.237063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.237063] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.290235] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.431472] env[61545]: DEBUG nova.network.neutron [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.446373] env[61545]: DEBUG nova.compute.utils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 949.447189] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 949.447390] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.458164] env[61545]: INFO nova.compute.manager [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] instance snapshotting [ 949.458924] env[61545]: DEBUG nova.objects.instance [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.471885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c59f822-a78c-49b8-bc5d-fbd04ba62f90 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "e3742aa7-0b26-41f5-b8c0-9388ef2b7e74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.284s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.490477] env[61545]: DEBUG nova.policy [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ad96bd4fd944165b4917c4dacaea04c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50b8a600a38442278d0cf036919f87c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.604186] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 9dbff26a-210c-4e80-812f-c91debe3e9c1] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 949.670340] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.794251] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.877913] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Successfully created port: 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.934264] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.934560] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance network_info: |[{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.935531] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:0c:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0467beaa-08c6-44d6-b8a2-e9c609c21ff4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.942463] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.943036] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.943271] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e1ffe3e-31c2-4aff-b6c0-a2fb700e23fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.962042] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 949.973320] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a459bb7-b09d-41a3-b83c-2b1bf786982f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.978789] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.978789] env[61545]: value = "task-4256044" [ 949.978789] env[61545]: _type = "Task" [ 949.978789] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.009234] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01371c23-a604-4b7f-b897-37cec7b6d104 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.017009] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256044, 'name': CreateVM_Task} progress is 15%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.095228] env[61545]: DEBUG nova.network.neutron [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.107218] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: c1b1ac1a-32da-442d-86ef-d754165f5a81] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 950.489681] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256044, 'name': CreateVM_Task, 'duration_secs': 0.336809} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.489869] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.490611] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.490772] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.491118] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 950.492059] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88337e1b-f476-428a-bcb4-56b9931323db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.496721] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 950.496721] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527c8223-eb4a-f70d-6a55-fa9ab8e60ef6" [ 950.496721] env[61545]: _type = "Task" [ 950.496721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.508797] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527c8223-eb4a-f70d-6a55-fa9ab8e60ef6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.531172] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 950.534515] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-10f913bb-5bf8-49a1-8170-09c5c30c1664 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.543272] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 950.543272] env[61545]: value = "task-4256045" [ 950.543272] env[61545]: _type = "Task" [ 950.543272] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.565152] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256045, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.575046] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eec75d8-c3b4-4b3d-b7b1-8769b250a351 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.586378] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6d8353-ad41-42a8-9488-6f2ca78bc516 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.619063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.619411] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Instance network_info: |[{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 950.620318] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 43cf4c96-2c8b-4520-8926-c1be5a87734e] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 950.624825] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:a5:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ea1dae1-c4a7-423c-9d65-dbc15e4848b1', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.631353] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.632115] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28557a9f-7119-4c06-93c5-45fa0e7aaecf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.635333] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.635569] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58b8dfc9-938c-4771-bef6-16805e5f9818 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.658605] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e03c3d-c5ae-49c5-9855-eeeca039a0d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.662529] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.662529] env[61545]: value = "task-4256046" [ 950.662529] env[61545]: _type = "Task" [ 950.662529] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.674883] env[61545]: DEBUG nova.compute.provider_tree [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.682313] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256046, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.740359] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.740566] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.740842] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.741089] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.741311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.743967] env[61545]: INFO nova.compute.manager [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Terminating instance [ 950.792240] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "4b29ebc4-d913-447c-bc57-890953cf8d49" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.792541] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.792747] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.792930] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.793114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.795418] env[61545]: INFO nova.compute.manager [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Terminating instance [ 950.978935] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 950.984496] env[61545]: DEBUG nova.compute.manager [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 950.984700] env[61545]: DEBUG nova.compute.manager [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing instance network info cache due to event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 950.984934] env[61545]: DEBUG oslo_concurrency.lockutils [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] Acquiring lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.985136] env[61545]: DEBUG oslo_concurrency.lockutils [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] Acquired lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.985325] env[61545]: DEBUG nova.network.neutron [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.009136] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.009396] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.009555] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.009773] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.009927] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.010094] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 
tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.010316] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.010476] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.010645] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.010805] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.010977] env[61545]: DEBUG nova.virt.hardware [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.012212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abde9ba-a07d-4795-8c3b-ab8a9a30422f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.019252] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527c8223-eb4a-f70d-6a55-fa9ab8e60ef6, 'name': SearchDatastore_Task, 'duration_secs': 0.010766} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.020033] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.020321] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.020593] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.020742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.020923] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.021254] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6fc37e5-0ba1-4f14-bec0-7fc1c6e25f39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.026877] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96324e2-b12e-4ba2-aa7e-f61f4412addb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.032679] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.032868] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.034010] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d8e654-810f-4183-b58e-57eb6b781d4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.049585] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 951.049585] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52809d31-a2f1-6a0b-c176-2834056da19b" [ 951.049585] env[61545]: _type = "Task" [ 951.049585] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.057052] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256045, 'name': CreateSnapshot_Task, 'duration_secs': 0.489215} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.058044] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 951.058639] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec44bfef-52a4-4122-af9f-351f01156a31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.065897] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52809d31-a2f1-6a0b-c176-2834056da19b, 'name': SearchDatastore_Task, 'duration_secs': 0.010556} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.067339] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5707b727-fa81-458e-a544-df527d9edd24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.073994] env[61545]: INFO nova.compute.manager [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Rebuilding instance [ 951.087022] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 951.087022] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274eca8-3cfb-8557-4d63-b874e5e8e35d" [ 951.087022] env[61545]: _type = "Task" [ 951.087022] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.095667] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274eca8-3cfb-8557-4d63-b874e5e8e35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.125497] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 26a6b40e-f8a4-4cc6-bdbb-586ca592901c] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 951.128096] env[61545]: DEBUG nova.compute.manager [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 951.128975] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a519eb8-7dd4-4b28-a61f-717c3ff397df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.173679] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256046, 'name': CreateVM_Task, 'duration_secs': 0.394936} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.174685] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.175434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.175608] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.175930] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 951.176476] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c174b1e-de2d-461f-bf93-dafc6021020e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.179022] env[61545]: DEBUG nova.scheduler.client.report [None req-7a053134-30cb-4876-bb79-4a3863b96938 
tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.187169] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 951.187169] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d43e25-8cb2-5136-bef7-3ea325143846" [ 951.187169] env[61545]: _type = "Task" [ 951.187169] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.197552] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d43e25-8cb2-5136-bef7-3ea325143846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.248309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "refresh_cache-19aabdc5-8d2f-4adb-aea0-34ce4482677a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.248699] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquired lock "refresh_cache-19aabdc5-8d2f-4adb-aea0-34ce4482677a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.248929] env[61545]: DEBUG nova.network.neutron [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.300232] env[61545]: DEBUG nova.compute.manager [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 951.300503] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.301545] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74dd4f3d-708b-41d1-9e40-b0885b6f67bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.313630] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.313989] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35ad61d0-0edf-4c55-b062-93b64a2cc76c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.322354] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 951.322354] env[61545]: value = "task-4256047" [ 951.322354] env[61545]: _type = "Task" [ 951.322354] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.334505] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4256047, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.412808] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Successfully updated port: 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 951.590019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 951.590019] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f75d8695-a225-489e-8087-592f0ec6b414 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.607040] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5274eca8-3cfb-8557-4d63-b874e5e8e35d, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.608492] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.608800] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/1e5be92c-d727-4515-9e16-85ade2719455.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.609223] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 951.609223] env[61545]: value = "task-4256048" [ 951.609223] env[61545]: _type = "Task" [ 951.609223] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.609457] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba0a9e79-a67d-4678-b34b-344497fb3411 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.623582] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.625321] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 951.625321] env[61545]: value = "task-4256049" [ 951.625321] env[61545]: _type = "Task" [ 951.625321] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.629086] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: e8c954ec-de76-4d3e-9a63-6c30523d5b63] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 951.642380] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.684228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.749s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.684780] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.688304] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.572s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.688515] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.690791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.409s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.690994] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.693202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.313s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.693407] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.695941] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.373s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.697944] env[61545]: INFO nova.compute.claims [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.716099] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] 
Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d43e25-8cb2-5136-bef7-3ea325143846, 'name': SearchDatastore_Task, 'duration_secs': 0.024873} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.716375] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.716620] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.716862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.717022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.717416] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.717540] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a44fdb8-3185-4c59-bdb7-cb171ff106e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.734034] env[61545]: INFO nova.scheduler.client.report [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Deleted allocations for instance 5f4d6338-d1af-4e58-9f76-5e95d51e76f7 [ 951.743194] env[61545]: INFO nova.scheduler.client.report [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted allocations for instance bea2e59c-02fd-4d6d-8f10-b0e265fa87a2 [ 951.743833] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.744018] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.750240] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-280af858-c93a-4ae7-bf42-3511c460c434 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.753273] env[61545]: INFO nova.scheduler.client.report [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Deleted allocations for instance a84d7a3d-2f7e-459d-94ca-7caa32b7a472 [ 951.764103] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 951.764103] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52db43e4-ad8e-ca92-09a1-1aa3697e6a77" [ 951.764103] env[61545]: _type = "Task" [ 951.764103] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.770814] env[61545]: DEBUG nova.network.neutron [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updated VIF entry in instance network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.771187] env[61545]: DEBUG nova.network.neutron [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.777578] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': 
session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52db43e4-ad8e-ca92-09a1-1aa3697e6a77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.783842] env[61545]: DEBUG nova.network.neutron [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 951.832914] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4256047, 'name': PowerOffVM_Task, 'duration_secs': 0.222446} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.833170] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.833347] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.833619] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f3d2129-777a-4fcf-aff9-da70fbd2c269 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.856461] env[61545]: DEBUG nova.network.neutron [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.907949] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.908262] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.908519] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleting the datastore file [datastore2] 4b29ebc4-d913-447c-bc57-890953cf8d49 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.908829] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-7e519e34-ae3e-4211-9643-9f486745aedb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.916629] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.916822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.916972] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.920243] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for the task: (returnval){ [ 951.920243] env[61545]: value = "task-4256051" [ 951.920243] env[61545]: _type = "Task" [ 951.920243] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.932426] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4256051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.124345] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.141452] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 8214216a-0256-467e-ac4c-1d14b0f73b77] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 952.142879] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256049, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.143812] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.144174] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b73290d-6236-4c26-a928-2c028408a2cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.156404] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 952.156404] env[61545]: value = "task-4256052" [ 952.156404] env[61545]: _type = "Task" [ 952.156404] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.171081] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.190586] env[61545]: DEBUG nova.compute.utils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.193557] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.193828] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.256786] env[61545]: DEBUG oslo_concurrency.lockutils [None req-da4b5876-d1c1-4983-a950-f539aff9576e tempest-ListImageFiltersTestJSON-574044180 tempest-ListImageFiltersTestJSON-574044180-project-member] Lock "5f4d6338-d1af-4e58-9f76-5e95d51e76f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.332s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.260929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0ac9c65e-d4d8-4d96-bad4-95928bde23c3 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "bea2e59c-02fd-4d6d-8f10-b0e265fa87a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.853s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.265992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d1eeb806-a7ca-4710-84f1-d1a9fc7a2c84 tempest-MultipleCreateTestJSON-1583574743 tempest-MultipleCreateTestJSON-1583574743-project-member] Lock "a84d7a3d-2f7e-459d-94ca-7caa32b7a472" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.953s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.278510] env[61545]: DEBUG oslo_concurrency.lockutils [req-17e0dbc4-bbc1-431e-abde-52ee2fce0ff9 req-d5bf49b5-9405-4195-9d99-c275488f61cf service nova] Releasing lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.279065] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52db43e4-ad8e-ca92-09a1-1aa3697e6a77, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.280586] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb5b197f-48cc-46cd-a7e2-e7fb5f42dca7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.288973] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 952.288973] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262c778-363c-bfaf-fa8d-3395ad320900" [ 952.288973] env[61545]: _type = "Task" [ 952.288973] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.299773] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262c778-363c-bfaf-fa8d-3395ad320900, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.307336] env[61545]: DEBUG nova.policy [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a926fd4a58ac4f989e04259d46663bc9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f598bcded6824792b972dfec9fc0fa22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.360406] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Releasing lock "refresh_cache-19aabdc5-8d2f-4adb-aea0-34ce4482677a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.360901] env[61545]: DEBUG nova.compute.manager [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 952.361200] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.364112] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de66ac4e-c973-44e2-a514-a53474a9531c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.373847] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.374148] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-580f7d54-ddf0-4245-9d61-abbf049ba5b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.382146] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 952.382146] env[61545]: value = "task-4256053" [ 952.382146] env[61545]: _type = "Task" [ 952.382146] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.393357] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.434197] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4256051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.458170] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 952.625609] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.636190] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650429} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.636588] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/1e5be92c-d727-4515-9e16-85ade2719455.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.636861] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.637209] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f84cda53-384a-445f-8eac-3c218fcb3a98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.645880] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: b2579785-d1a4-48da-ba27-6ee3098578f1] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 952.648252] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 952.648252] env[61545]: value = "task-4256054" [ 952.648252] env[61545]: _type = "Task" [ 952.648252] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.661450] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256054, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.663223] env[61545]: DEBUG nova.network.neutron [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.678334] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256052, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.694138] env[61545]: DEBUG nova.compute.utils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.800745] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5262c778-363c-bfaf-fa8d-3395ad320900, 'name': SearchDatastore_Task, 'duration_secs': 0.027505} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.803565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.803866] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8ab168cb-b0a9-403c-bdb5-b96c6d319baf/8ab168cb-b0a9-403c-bdb5-b96c6d319baf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.804529] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6909bb6-f815-4144-ba09-86ebd72098c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.813560] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 952.813560] env[61545]: value = "task-4256055" [ 952.813560] env[61545]: _type = "Task" [ 952.813560] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.825112] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.900643] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256053, 'name': PowerOffVM_Task, 'duration_secs': 0.313212} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.902364] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.902364] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.903527] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d7d6b57-b847-4e0a-868b-e84854631ff8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.936065] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.936503] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.936826] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Deleting the datastore file [datastore2] 19aabdc5-8d2f-4adb-aea0-34ce4482677a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.942110] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14740cf7-2392-48ca-8d4a-ac5974fde122 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.945465] env[61545]: DEBUG oslo_vmware.api [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Task: {'id': task-4256051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.701733} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.948024] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.948253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.948463] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.948644] env[61545]: INFO nova.compute.manager [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Took 1.65 seconds to destroy the instance on the hypervisor. [ 952.949016] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.949865] env[61545]: DEBUG nova.compute.manager [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 952.949966] env[61545]: DEBUG nova.network.neutron [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.955358] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for the task: (returnval){ [ 952.955358] env[61545]: value = "task-4256057" [ 952.955358] env[61545]: _type = "Task" [ 952.955358] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.966864] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256057, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.042247] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Successfully created port: b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.084291] env[61545]: DEBUG nova.compute.manager [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 953.086240] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.086240] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.086240] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.086240] env[61545]: DEBUG nova.compute.manager [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] No waiting events found dispatching network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.086240] env[61545]: WARNING nova.compute.manager [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received unexpected event network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 for instance with vm_state building and task_state spawning. [ 953.086240] env[61545]: DEBUG nova.compute.manager [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 953.086240] env[61545]: DEBUG nova.compute.manager [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing instance network info cache due to event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 953.086240] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.127172] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.159010] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: d7ed99e5-3f96-4053-9b9a-a4b7edb1f351] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 953.168687] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08184} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.176235] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.176235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.176235] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance network_info: |[{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.176235] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b516285-d3b5-4cfa-bf85-09892025ea62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.178858] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.179273] env[61545]: DEBUG nova.network.neutron [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.184620] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:a3:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e7e6bd8-fac2-4516-af29-a249216acca6', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.195242] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating folder: Project (50b8a600a38442278d0cf036919f87c2). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.200634] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ea10569-bdf5-4ce5-bf68-33a6d19bbe0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.203125] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.207292] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256052, 'name': PowerOffVM_Task, 'duration_secs': 0.539272} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.215085] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.215085] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.215085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4559426e-044f-410f-a838-4a09d6413c1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.242878] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/1e5be92c-d727-4515-9e16-85ade2719455.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.248785] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c3540fc-9ac4-4b37-babc-4b2be6628c99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.264658] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created folder: Project (50b8a600a38442278d0cf036919f87c2) in parent group-v838542. [ 953.264866] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating folder: Instances. Parent ref: group-v838720. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.270234] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f93ce53e-ba1a-43ac-ae4d-fd046949fac3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.271702] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.272439] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d3d6d5d-24b4-4672-b679-abde8be14a73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.279697] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 953.279697] env[61545]: value = "task-4256059" [ 953.279697] env[61545]: _type = "Task" [ 953.279697] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.288770] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created folder: Instances in parent group-v838720. [ 953.288945] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.290228] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.290228] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-799823a7-3784-4ab4-b18c-b1f9e4c57458 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.310881] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.321736] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.321736] env[61545]: value = "task-4256062" [ 953.321736] env[61545]: _type = "Task" [ 953.321736] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.331385] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256055, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.340457] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256062, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.354273] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881c3b98-821c-45f1-b7e3-63559c6fbd49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.364966] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e901f8-c691-44ff-956c-af8d13fae575 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.408150] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70eafb89-bedf-4147-b3ad-2960f6d643dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.411269] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.411536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.411724] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleting the datastore file [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.412034] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfeb0677-b2f1-4bb3-a287-7691026e11c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.422214] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b728b383-a631-420c-b4bb-27b78781df5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.428188] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 953.428188] env[61545]: value = "task-4256063" [ 953.428188] env[61545]: _type = "Task" [ 953.428188] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.444156] env[61545]: DEBUG nova.compute.provider_tree [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.452675] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.468948] env[61545]: DEBUG oslo_vmware.api [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Task: {'id': task-4256057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234424} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.469277] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.469477] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.469668] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.469907] env[61545]: INFO nova.compute.manager [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 953.470188] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.470390] env[61545]: DEBUG nova.compute.manager [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 953.470483] env[61545]: DEBUG nova.network.neutron [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.505526] env[61545]: DEBUG nova.network.neutron [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.626107] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.661632] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1dda6a7b-ff1e-488a-af42-2e4ffc16b5a3] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 953.800921] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256059, 'name': ReconfigVM_Task, 'duration_secs': 0.497544} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.802480] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/1e5be92c-d727-4515-9e16-85ade2719455.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.803605] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ca7b80c-6c4a-4797-ae00-33675939b930 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.814050] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 953.814050] env[61545]: value = "task-4256064" [ 953.814050] env[61545]: _type = "Task" [ 953.814050] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.832227] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256055, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705442} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.832539] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256064, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.833137] env[61545]: DEBUG nova.network.neutron [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.834405] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8ab168cb-b0a9-403c-bdb5-b96c6d319baf/8ab168cb-b0a9-403c-bdb5-b96c6d319baf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.834626] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.838909] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2a5261-4d30-4cae-a6ad-76ee3fadd439 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.841103] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256062, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.853316] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 953.853316] env[61545]: value = "task-4256065" [ 953.853316] env[61545]: _type = "Task" [ 953.853316] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.865830] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.937780] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294206} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.938165] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.938437] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.938580] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.950517] env[61545]: DEBUG nova.scheduler.client.report [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.959553] env[61545]: DEBUG nova.network.neutron [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updated VIF entry in instance network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 953.959977] env[61545]: DEBUG nova.network.neutron [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.009155] env[61545]: DEBUG nova.network.neutron [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.126912] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256048, 'name': CloneVM_Task, 'duration_secs': 2.133646} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.127250] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created linked-clone VM from snapshot [ 954.128045] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5c3274-9cbf-447b-a4b3-a01f8b306889 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.140209] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploading image 289430b0-3fa0-4a1e-a8a8-6497038e41c8 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 954.167107] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 954.167107] env[61545]: value = "vm-838719" [ 954.167107] env[61545]: _type = "VirtualMachine" [ 954.167107] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 954.167601] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 0554c462-1dc5-4043-94ac-7a3d28ed05e1] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 954.169491] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7444c475-12e9-42ce-9949-904aee00c0ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.182961] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease: (returnval){ [ 954.182961] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251ca30-7461-7662-e7de-ee6b78ecf31e" [ 954.182961] env[61545]: _type = "HttpNfcLease" [ 954.182961] env[61545]: } obtained for exporting VM: (result){ [ 954.182961] env[61545]: value = "vm-838719" [ 954.182961] env[61545]: _type = "VirtualMachine" [ 954.182961] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 954.183305] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the lease: (returnval){ [ 954.183305] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251ca30-7461-7662-e7de-ee6b78ecf31e" [ 954.183305] env[61545]: _type = "HttpNfcLease" [ 954.183305] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 954.194262] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.194262] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251ca30-7461-7662-e7de-ee6b78ecf31e" [ 954.194262] env[61545]: _type = "HttpNfcLease" [ 954.194262] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 954.219862] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.244424] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:46:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1206130219',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1539558448',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.244689] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.244848] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.245369] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.245590] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.245747] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 
tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.245960] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.246137] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.246304] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.246466] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.246640] env[61545]: DEBUG nova.virt.hardware [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.247952] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0843345e-c5fa-4055-8b56-a99902d8f371 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.258125] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bc5274-e2f4-43d9-b384-6e8a5c6869ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.325965] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256064, 'name': Rename_Task, 'duration_secs': 0.180586} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.331344] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.331655] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee8a32cc-e186-464b-a242-de8698c9db10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.340412] env[61545]: INFO nova.compute.manager [-] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Took 1.39 seconds to deallocate network for instance. [ 954.341181] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256062, 'name': CreateVM_Task, 'duration_secs': 0.600707} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.344408] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.344538] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 954.344538] env[61545]: value = "task-4256067" [ 954.344538] env[61545]: _type = "Task" [ 954.344538] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.345302] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.345543] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.345890] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.348971] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5970e194-a9d6-4d16-baa5-0972cb3f3132 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.362624] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 
tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 954.362624] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c74baa-387d-7c70-76c6-5b82daae56b7" [ 954.362624] env[61545]: _type = "Task" [ 954.362624] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.366751] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256067, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.373086] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149911} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.373916] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.374691] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2916da48-bda2-46df-a991-6148e5098580 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.381857] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c74baa-387d-7c70-76c6-5b82daae56b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011845} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.382762] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.383076] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.383381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.383546] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.384036] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.384036] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83723217-3bb1-4767-b59c-aa00a94e889f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.410482] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 8ab168cb-b0a9-403c-bdb5-b96c6d319baf/8ab168cb-b0a9-403c-bdb5-b96c6d319baf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.411616] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72ad99dd-703f-427d-84a4-5161735c4585 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.427996] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.428324] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.429658] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef40d973-7ba1-4b6d-b1e9-f38f34262f78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.435190] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 954.435190] env[61545]: value = "task-4256068" [ 954.435190] env[61545]: _type = "Task" [ 954.435190] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.436766] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 954.436766] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ba839-4463-da9d-bf5d-ab210ff3a476" [ 954.436766] env[61545]: _type = "Task" [ 954.436766] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.454076] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256068, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.457533] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.458035] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.460781] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ba839-4463-da9d-bf5d-ab210ff3a476, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.461393] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.240s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.461620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.464584] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.521s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.464584] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.466962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.652s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.467306] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.469820] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.461s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.472463] env[61545]: INFO nova.compute.claims [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.476444] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e1eec58-6e3d-4e63-acbf-d1d77d53f62c 
req-f4d09136-6eaf-42d5-87af-af8a7bdb2bb6 service nova] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.513873] env[61545]: INFO nova.compute.manager [-] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Took 1.04 seconds to deallocate network for instance. [ 954.531489] env[61545]: INFO nova.scheduler.client.report [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Deleted allocations for instance 9cf6dd9e-40e9-4df6-9342-2850e0f93d85 [ 954.533753] env[61545]: INFO nova.scheduler.client.report [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Deleted allocations for instance 69c59bd5-1f57-4fa2-afab-348e5f57501e [ 954.561042] env[61545]: INFO nova.scheduler.client.report [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted allocations for instance 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae [ 954.678197] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5719daa8-a5bc-4604-b465-a57097695c6d] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 954.695213] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.695213] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251ca30-7461-7662-e7de-ee6b78ecf31e" [ 954.695213] env[61545]: _type = "HttpNfcLease" [ 954.695213] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 954.696246] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 954.696246] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251ca30-7461-7662-e7de-ee6b78ecf31e" [ 954.696246] env[61545]: _type = "HttpNfcLease" [ 954.696246] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 954.701023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbc4ade-110f-42b7-a0be-dd33cbbcd5b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.707673] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 954.708016] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 954.858245] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.858797] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256067, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.913491] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cbcf4f06-0c75-4856-b4fc-8ba3e6ae9be1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.959184] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256068, 'name': ReconfigVM_Task, 'duration_secs': 0.350846} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.963244] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 8ab168cb-b0a9-403c-bdb5-b96c6d319baf/8ab168cb-b0a9-403c-bdb5-b96c6d319baf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.964807] env[61545]: DEBUG nova.compute.utils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 954.966159] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ba839-4463-da9d-bf5d-ab210ff3a476, 'name': SearchDatastore_Task, 'duration_secs': 0.036179} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.971699] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-925705f1-02ba-4842-ab30-e60c71d23753 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.973709] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 954.973885] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.976726] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a59000d1-d4e5-460d-84c9-7fb8601307d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.993794] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 954.993794] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f2836f-270e-ba6b-5d41-dad8a52f5ad8" [ 954.993794] env[61545]: _type = "Task" [ 954.993794] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.997721] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.998450] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.998450] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.998450] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.998606] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.998767] env[61545]: 
DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.999016] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.999242] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.999731] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.999731] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.999837] env[61545]: DEBUG nova.virt.hardware [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.000273] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 955.000273] env[61545]: value = "task-4256069" [ 955.000273] env[61545]: _type = "Task" [ 955.000273] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.001060] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb157d5e-7a9c-4949-9583-5a0e8ac702a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.026152] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba79468-e02f-4297-b801-903b28602c88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.033158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.033529] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256069, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.033807] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f2836f-270e-ba6b-5d41-dad8a52f5ad8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.051504] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:96:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af2c9b85-3238-4b4f-b74f-b72d7b575e73', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.059561] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.061664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c70d1024-3f30-4291-8fbc-54a0773c4e8c tempest-ServersV294TestFqdnHostnames-598528889 tempest-ServersV294TestFqdnHostnames-598528889-project-member] Lock "9cf6dd9e-40e9-4df6-9342-2850e0f93d85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.893s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.061664] env[61545]: DEBUG oslo_concurrency.lockutils [None req-09cd780a-2912-4ce1-8df3-da2d8e811b70 tempest-ServerShowV247Test-1416521926 tempest-ServerShowV247Test-1416521926-project-member] Lock "69c59bd5-1f57-4fa2-afab-348e5f57501e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.094s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.062791] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.063366] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2357ebe-7b0f-4fa3-af75-c5280d8120b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.085082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38490d9b-e56d-4717-a9b5-005d819a6118 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "63b3a0ac-6077-4b07-bff0-81e5faa6a2ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.753s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.091031] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.091031] env[61545]: value = "task-4256070" [ 955.091031] env[61545]: _type = "Task" [ 955.091031] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.107402] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256070, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.172201] env[61545]: DEBUG nova.policy [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113fc58985704b0b9e0a28be2f61cd68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9007a6e389c0467c8e2077309984eaab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.181025] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 8a3ac91d-8949-4745-9161-1a70899c0293] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 955.360696] env[61545]: DEBUG oslo_vmware.api [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256067, 'name': PowerOnVM_Task, 'duration_secs': 0.945048} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.361647] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.362734] env[61545]: INFO nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Took 9.78 seconds to spawn the instance on the hypervisor. [ 955.363106] env[61545]: DEBUG nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.364137] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13167a0f-3821-4692-a46e-aee79c6d2385 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.479092] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.525956] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f2836f-270e-ba6b-5d41-dad8a52f5ad8, 'name': SearchDatastore_Task, 'duration_secs': 0.034585} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.528104] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.528104] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.528398] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7276499-2661-4073-960c-e3be1f5645f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.540900] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256069, 'name': Rename_Task, 'duration_secs': 0.164882} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.545303] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.551520] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2e32e4a-f222-481d-b0cf-a2f122ce7b1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.558210] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 955.558210] env[61545]: value = "task-4256071" [ 955.558210] env[61545]: _type = "Task" [ 955.558210] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.575644] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 955.575644] env[61545]: value = "task-4256072" [ 955.575644] env[61545]: _type = "Task" [ 955.575644] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.578471] env[61545]: DEBUG nova.compute.manager [req-a9600cf9-d2ec-427e-aa3a-e6ce66bf90ba req-f85b3025-2923-4c85-8aca-0bae7f9b41a8 service nova] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Received event network-vif-deleted-af3b8dc6-019b-4076-b26a-f6eaaa30a979 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 955.596995] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.608686] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256072, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.621939] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256070, 'name': CreateVM_Task, 'duration_secs': 0.480793} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.621939] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.621939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.621939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.621939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 955.621939] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6504707-4b7f-48eb-a76b-b8c330e862af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.631466] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 955.631466] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282ca4a-6352-d0ef-d29c-3d76e1492e0f" [ 955.631466] env[61545]: _type = "Task" [ 955.631466] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.646382] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282ca4a-6352-d0ef-d29c-3d76e1492e0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.688397] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 4f879b20-bae0-4d50-b5e9-378356341962] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 955.870363] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Successfully created port: f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.896767] env[61545]: INFO nova.compute.manager [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Took 58.89 seconds to build instance. [ 956.078862] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256071, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.101839] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256072, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.146969] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282ca4a-6352-d0ef-d29c-3d76e1492e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.01562} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.151572] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.151721] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.152423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.152423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.152655] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.153747] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de2d979f-0a37-4a9c-a696-8df1c2ef3e87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.173934] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.174741] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.176530] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ce1e1a-ee3a-4d14-9458-531f514f4fcd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.189555] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 956.189555] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5275db96-1c9f-29b5-7d95-6a5b2d80b006" [ 956.189555] env[61545]: _type = "Task" [ 956.189555] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.200797] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5d9eadff-7f13-4720-8119-5829b4802c21] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 956.216122] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Successfully updated port: b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.218215] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5275db96-1c9f-29b5-7d95-6a5b2d80b006, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.309448] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36b7a6f-4cc7-498f-b664-70a20b3af795 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.321620] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80215968-bd2f-40a5-9f52-0b5e0e9f2391 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.361454] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea37e02-afcf-4889-ae1c-682c40e5c0ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.365547] env[61545]: DEBUG nova.compute.manager [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Received event network-vif-plugged-b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 956.365785] env[61545]: DEBUG oslo_concurrency.lockutils [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] Acquiring lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.365991] env[61545]: DEBUG oslo_concurrency.lockutils [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.366181] env[61545]: DEBUG oslo_concurrency.lockutils [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.366349] env[61545]: DEBUG nova.compute.manager [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] No waiting events found dispatching network-vif-plugged-b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.366508] env[61545]: WARNING nova.compute.manager [req-361f1636-cb09-42b5-8d8d-8fb96da8a876 req-558b2f8e-8550-47c0-8d8c-4f4b765efbb1 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Received unexpected event network-vif-plugged-b310c98a-9de2-40bc-a430-b4d1724a069b for instance with vm_state building and task_state spawning. 
[ 956.374555] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66afeed-fb0a-4336-a4b0-4b79a8920c68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.397826] env[61545]: DEBUG nova.compute.provider_tree [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.401502] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62d9fc90-43c7-4452-bd8e-42e59223efa0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.927s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.491064] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.523834] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.524342] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.524533] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.524720] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.524865] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.525128] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.525450] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.525450] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.525737] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.525940] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.526269] env[61545]: DEBUG nova.virt.hardware [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.527047] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440a5df3-fbae-4e98-ac87-f9fae2042f7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.538321] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33b9412-285c-41c5-b55e-45689c0fa81f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.579864] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688622} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.580222] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.580652] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.581037] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4196d1b7-67fe-4001-b71d-ef8cff3c7008 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.600992] env[61545]: DEBUG oslo_vmware.api [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256072, 'name': PowerOnVM_Task, 'duration_secs': 0.760205} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.604256] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.604256] env[61545]: INFO nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Took 8.25 seconds to spawn the instance on the hypervisor. [ 956.604256] env[61545]: DEBUG nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.604256] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 956.604256] env[61545]: value = "task-4256073" [ 956.604256] env[61545]: _type = "Task" [ 956.604256] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.604920] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d985519b-fbcb-4ad8-b40e-bb838b80e45a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.630547] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.711144] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 579fb20b-083f-4227-9a13-c0f1ea36e272] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 956.717731] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5275db96-1c9f-29b5-7d95-6a5b2d80b006, 'name': SearchDatastore_Task, 'duration_secs': 0.02471} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.721173] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4f0117-d4a4-40d0-ae2c-b99f6f3083c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.726272] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.726622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.726871] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.736022] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 956.736022] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52539b9c-0b1c-be18-4163-ac0c58628337" [ 956.736022] env[61545]: _type = "Task" [ 956.736022] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.756047] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52539b9c-0b1c-be18-4163-ac0c58628337, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.902201] env[61545]: DEBUG nova.scheduler.client.report [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.117569] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107945} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.117877] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.118771] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cadeab-c64e-41cf-8ebc-682b70c95621 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.153380] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.155305] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44d915ae-e72c-46c1-8c2b-2cae98d73878 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.175029] env[61545]: INFO nova.compute.manager [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Took 58.48 seconds to build instance. 
[ 957.176230] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "9b62358e-c834-461c-9954-49f513b0f4ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.176464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.177049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.177049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.177272] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.179619] env[61545]: INFO nova.compute.manager [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Terminating instance [ 957.186084] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 957.186084] env[61545]: value = "task-4256074" [ 957.186084] env[61545]: _type = "Task" [ 957.186084] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.196757] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256074, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.220031] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 256e48c1-81de-4d32-97dc-ba80541a9239] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 957.251016] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52539b9c-0b1c-be18-4163-ac0c58628337, 'name': SearchDatastore_Task, 'duration_secs': 0.021704} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.251468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.251740] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.252067] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04b3bafd-597d-4015-ba49-3ac08fb57056 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.261101] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 957.261101] env[61545]: value = "task-4256075" [ 957.261101] env[61545]: _type = "Task" [ 957.261101] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.273457] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.370148] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.411631] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.942s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.413093] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 957.417946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.962s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.417946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.422034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 24.735s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.471475] env[61545]: INFO nova.scheduler.client.report [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleted allocations for instance 1537dbf0-d1b6-410f-8333-788761dd24d7 [ 957.679782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e4a8fe4-9c55-47c4-9372-c4007b87d80a tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.528s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.684450] env[61545]: DEBUG nova.compute.manager [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.684677] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.685741] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbf50c3-ca9f-41ac-a182-dc6d4a8d817a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.705053] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.706507] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.706914] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c78ec16d-0b8c-43c4-a42f-ad13eb633773 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.717936] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 957.717936] env[61545]: value = "task-4256076" [ 957.717936] env[61545]: _type = "Task" [ 957.717936] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.725641] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1a551e66-1b98-44fd-ad16-c20113d9b1a6] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 957.738833] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.777356] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256075, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.889213] env[61545]: DEBUG nova.network.neutron [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updating instance_info_cache with network_info: [{"id": "b310c98a-9de2-40bc-a430-b4d1724a069b", "address": "fa:16:3e:ee:16:8a", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb310c98a-9d", "ovs_interfaceid": "b310c98a-9de2-40bc-a430-b4d1724a069b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.926585] env[61545]: DEBUG nova.objects.instance [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lazy-loading 'migration_context' on Instance uuid dad53420-37f1-42ef-b0d3-e35c73b97417 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.930709] env[61545]: DEBUG nova.compute.utils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.933433] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.933433] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.983044] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c1e40c7-e060-4b73-8ad2-15b2123548c4 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "1537dbf0-d1b6-410f-8333-788761dd24d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.202s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.093539] env[61545]: DEBUG nova.policy [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.212762] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256074, 'name': ReconfigVM_Task, 'duration_secs': 0.818624} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.213137] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.213687] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30d90c1f-d0e3-4223-b2c8-fafa4f443df9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.225412] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 958.225412] env[61545]: value = "task-4256077" [ 958.225412] env[61545]: _type = "Task" [ 958.225412] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.231992] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 2a43ac48-cdea-48c8-b3d2-e939c69ce2dc] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 958.247898] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256076, 'name': PowerOffVM_Task, 'duration_secs': 0.408587} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.251982] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.252395] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.252728] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256077, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.252954] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e30f4478-a3bc-4e14-bef9-813b3542e5bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.276831] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256075, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.328404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.328471] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.328605] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleting the datastore file [datastore2] 9b62358e-c834-461c-9954-49f513b0f4ac {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.328886] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9a1744f-fcf9-4ed3-9f52-af7cb6b8d732 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.338365] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 958.338365] env[61545]: value = "task-4256079" [ 958.338365] env[61545]: _type = "Task" [ 958.338365] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.351407] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.394055] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.394452] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Instance network_info: |[{"id": "b310c98a-9de2-40bc-a430-b4d1724a069b", "address": "fa:16:3e:ee:16:8a", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb310c98a-9d", "ovs_interfaceid": "b310c98a-9de2-40bc-a430-b4d1724a069b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 958.395267] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:16:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1cf14cf-4f9c-41af-90d0-62e363eb4fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b310c98a-9de2-40bc-a430-b4d1724a069b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.407790] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.407986] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.409846] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cad4694-5a69-4ad4-8c8c-cc86083aa600 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.438430] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Successfully updated port: f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.443545] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 958.455487] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.455487] env[61545]: value = "task-4256081" [ 958.455487] env[61545]: _type = "Task" [ 958.455487] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.472566] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256081, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.506148] env[61545]: DEBUG nova.compute.manager [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Received event network-vif-plugged-f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 958.506608] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] Acquiring lock "578ce929-99fd-47ae-8275-e4ac9abe8d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.506947] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.507205] env[61545]: DEBUG oslo_concurrency.lockutils [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.507449] env[61545]: DEBUG nova.compute.manager [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a 
req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] No waiting events found dispatching network-vif-plugged-f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.507688] env[61545]: WARNING nova.compute.manager [req-4d9d958b-c373-4bd7-81dd-1fda08ac989a req-62c0d8de-9021-46a6-8afd-d53a767bab92 service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Received unexpected event network-vif-plugged-f7745e62-9a91-4729-af18-5a9f49312659 for instance with vm_state building and task_state spawning. [ 958.745661] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256077, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.749157] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.749919] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Cleaning up deleted instances with incomplete migration {{(pid=61545) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11903}} [ 958.775996] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256075, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.850505] env[61545]: DEBUG oslo_vmware.api [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267189} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.854236] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.854610] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.854897] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.855230] env[61545]: INFO nova.compute.manager [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Took 1.17 seconds to destroy the instance on the hypervisor. [ 958.855617] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.856207] env[61545]: DEBUG nova.compute.manager [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 958.856422] env[61545]: DEBUG nova.network.neutron [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.886568] env[61545]: DEBUG nova.compute.manager [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Received event network-changed-b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 958.887337] env[61545]: DEBUG nova.compute.manager [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Refreshing instance network info cache due to event network-changed-b310c98a-9de2-40bc-a430-b4d1724a069b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 958.887585] env[61545]: DEBUG oslo_concurrency.lockutils [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] Acquiring lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.887730] env[61545]: DEBUG oslo_concurrency.lockutils [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] Acquired lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.887914] env[61545]: DEBUG nova.network.neutron [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Refreshing network info cache for port b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.948662] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.949087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.949206] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.981120] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256081, 'name': CreateVM_Task, 'duration_secs': 0.513359} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.985930] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.986472] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.986679] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.987027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.987408] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cce0d9f-6476-4a4c-b06f-3d736d1a40d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.994627] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 958.994627] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522491fa-e43c-f75b-58ae-dd7356dbe5aa" [ 958.994627] env[61545]: _type = "Task" [ 958.994627] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.005948] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522491fa-e43c-f75b-58ae-dd7356dbe5aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.076347] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facfbfce-b693-4fd5-9215-bd3ba392126e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.086427] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9e907e-63b0-40fa-9afa-f502e9eb2e23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.134336] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51748e0-d2fd-4ae9-84fe-6b2b841b5c5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.148517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca24369-7f8f-496f-8ce0-1c6bf50dbcd1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.166252] env[61545]: DEBUG nova.compute.provider_tree [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.247550] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256077, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.252227] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.279325] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256075, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.703944} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.279753] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.279925] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.280398] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-999999fb-8095-4536-a468-354f75d52e5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.294027] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 959.294027] env[61545]: value = "task-4256082" [ 959.294027] env[61545]: _type = "Task" [ 959.294027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.307629] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256082, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.466181] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 959.500639] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 959.500905] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.501151] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.501317] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.501489] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 959.501657] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 959.501910] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 959.502820] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 959.503103] env[61545]: DEBUG nova.virt.hardware [None 
req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 959.503329] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 959.503559] env[61545]: DEBUG nova.virt.hardware [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 959.507780] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c358b6d-07d1-4008-a022-503e60cfb2b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.517388] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522491fa-e43c-f75b-58ae-dd7356dbe5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.028176} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.520464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.520797] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.520942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.521036] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.521227] env[61545]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.521575] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbbadaac-5aaf-465a-b492-68246a32eb1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.524541] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458cae17-ed4c-46f1-88e3-f9a12210f606 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.544919] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.545248] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.546035] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d92c3d2-ef81-4535-a507-976d4b708b99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.552817] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 959.552817] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c16026-90fe-6953-a030-f659d3f6a60c" [ 959.552817] env[61545]: _type = "Task" [ 959.552817] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.563254] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c16026-90fe-6953-a030-f659d3f6a60c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.633391] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.672633] env[61545]: DEBUG nova.scheduler.client.report [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.750108] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256077, 'name': Rename_Task, 'duration_secs': 1.04814} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.750549] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.753744] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d0077c0-8fd1-4e0e-84a2-a14aaca8aa5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.760577] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 959.760577] env[61545]: value = "task-4256083" [ 959.760577] env[61545]: _type = "Task" [ 959.760577] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.763029] env[61545]: DEBUG nova.network.neutron [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updated VIF entry in instance network info cache for port b310c98a-9de2-40bc-a430-b4d1724a069b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.763029] env[61545]: DEBUG nova.network.neutron [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updating instance_info_cache with network_info: [{"id": "b310c98a-9de2-40bc-a430-b4d1724a069b", "address": "fa:16:3e:ee:16:8a", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb310c98a-9d", "ovs_interfaceid": "b310c98a-9de2-40bc-a430-b4d1724a069b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.779494] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256083, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.806633] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08693} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.807345] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.807937] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab46920-7242-4ad3-96db-b87a9662a384 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.838619] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.838949] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b1c60f-df75-4aaa-a99d-684913a3986e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.855482] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Successfully created port: dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.864036] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 959.864036] env[61545]: value = "task-4256084" [ 959.864036] env[61545]: _type = "Task" [ 959.864036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.875310] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256084, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.894699] env[61545]: DEBUG nova.network.neutron [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.065672] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c16026-90fe-6953-a030-f659d3f6a60c, 'name': SearchDatastore_Task, 'duration_secs': 0.018512} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.067813] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8362fd95-fb0d-4a18-9f76-8ef7081a4466 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.075946] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 960.075946] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290c98a-82bd-675f-e41c-f8f94a7c6755" [ 960.075946] env[61545]: _type = "Task" [ 960.075946] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.086038] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290c98a-82bd-675f-e41c-f8f94a7c6755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.152748] env[61545]: DEBUG nova.network.neutron [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Updating instance_info_cache with network_info: [{"id": "f7745e62-9a91-4729-af18-5a9f49312659", "address": "fa:16:3e:76:4a:39", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7745e62-9a", "ovs_interfaceid": "f7745e62-9a91-4729-af18-5a9f49312659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.265262] env[61545]: DEBUG oslo_concurrency.lockutils [req-121e238d-716e-4f33-9726-fc7ecdc2722d req-b0d677c2-cf9a-4df7-a3ac-c4b8bd7c82d2 service nova] Releasing lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.279038] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256083, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.379409] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256084, 'name': ReconfigVM_Task, 'duration_secs': 0.496317} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.379813] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7/56680678-c844-4dd2-8541-d50de83b22d7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.381257] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62c8f222-d38c-42fa-a5d4-37f741c9f4df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.389988] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 960.389988] env[61545]: value = "task-4256085" [ 960.389988] env[61545]: _type = "Task" [ 960.389988] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.398289] env[61545]: INFO nova.compute.manager [-] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Took 1.54 seconds to deallocate network for instance. [ 960.412861] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256085, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.596141] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290c98a-82bd-675f-e41c-f8f94a7c6755, 'name': SearchDatastore_Task, 'duration_secs': 0.017961} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.597610] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.601933] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/73df6df0-ead6-49cd-8b0a-5e95acfc7e15.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.601933] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9525d67d-3e54-4d06-b083-7ad6e35d3c35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.610550] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 960.610550] env[61545]: value = "task-4256086" [ 960.610550] env[61545]: _type = "Task" [ 960.610550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.624671] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256086, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.656035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.656373] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Instance network_info: |[{"id": "f7745e62-9a91-4729-af18-5a9f49312659", "address": "fa:16:3e:76:4a:39", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7745e62-9a", "ovs_interfaceid": "f7745e62-9a91-4729-af18-5a9f49312659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 960.656872] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:4a:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7745e62-9a91-4729-af18-5a9f49312659', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.667847] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.669199] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.669199] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae6cf8f1-a5c9-4343-93d2-e45a259704ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.689480] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.267s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.695434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.293s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.696884] env[61545]: INFO nova.compute.claims [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.707695] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.707695] env[61545]: value = "task-4256087" [ 960.707695] env[61545]: _type = "Task" [ 960.707695] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.721039] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256087, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.779026] env[61545]: DEBUG oslo_vmware.api [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256083, 'name': PowerOnVM_Task, 'duration_secs': 0.665504} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.779337] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.779560] env[61545]: INFO nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Took 9.80 seconds to spawn the instance on the hypervisor. 
[ 960.779771] env[61545]: DEBUG nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.780736] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2ac7d6-9a9f-4839-87ff-3766c6321334 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.903181] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256085, 'name': Rename_Task, 'duration_secs': 0.258291} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.903494] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.903791] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6437d5cd-bcbf-47f4-b2e7-08c3583e8e32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.912089] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 960.912089] env[61545]: value = "task-4256088" [ 960.912089] env[61545]: _type = "Task" [ 960.912089] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.918433] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.923035] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256088, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.040389] env[61545]: DEBUG nova.compute.manager [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 961.040713] env[61545]: DEBUG nova.compute.manager [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing instance network info cache due to event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 961.040840] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.041035] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.041225] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.124128] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256086, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.162223] env[61545]: DEBUG nova.compute.manager [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 961.162223] env[61545]: DEBUG nova.compute.manager [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing instance network info cache due to event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 961.162353] env[61545]: DEBUG oslo_concurrency.lockutils [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] Acquiring lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.162524] env[61545]: DEBUG oslo_concurrency.lockutils [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] Acquired lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.162748] env[61545]: DEBUG nova.network.neutron [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.223125] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256087, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.307671] env[61545]: INFO nova.compute.manager [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Took 57.98 seconds to build instance. [ 961.426820] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256088, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.627977] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.733782} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.628484] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/73df6df0-ead6-49cd-8b0a-5e95acfc7e15.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.628706] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.628781] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97d8a2a8-671f-4f6f-a1f6-ea2a3d98c538 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.640028] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 961.640028] env[61545]: value = "task-4256089" [ 961.640028] env[61545]: _type = "Task" [ 961.640028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.658871] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.729575] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256087, 'name': CreateVM_Task, 'duration_secs': 0.802591} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.729575] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.730789] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.730789] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.733261] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.733261] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-515ef66e-31ca-4d64-a9d0-da3534b6d61d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.740290] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 961.740290] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52816978-80ec-a789-e413-400661568b21" [ 961.740290] env[61545]: _type = "Task" [ 961.740290] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.752317] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.752532] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.752788] env[61545]: INFO nova.compute.manager [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Rebooting instance [ 961.755448] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52816978-80ec-a789-e413-400661568b21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.816107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae055f3-9abc-47ec-bcb9-f788e5e2f8a9 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.123s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.926680] env[61545]: DEBUG oslo_vmware.api [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256088, 'name': PowerOnVM_Task, 'duration_secs': 0.9739} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.926982] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.927236] env[61545]: DEBUG nova.compute.manager [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.928105] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af13b07-c2cf-4f0b-9c20-2ca754ace6a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.146297] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updated VIF entry in instance network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.146659] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.156227] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090719} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.159118] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.161493] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d426ba50-dac9-4f72-8186-cbfab979ee78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.190237] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/73df6df0-ead6-49cd-8b0a-5e95acfc7e15.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.193749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16e6f790-44f9-44a4-86f7-27a760a7f988 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.209242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.209519] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.209736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.209922] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.210135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 
tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.212205] env[61545]: INFO nova.compute.manager [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Terminating instance [ 962.233519] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 962.233519] env[61545]: value = "task-4256090" [ 962.233519] env[61545]: _type = "Task" [ 962.233519] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.249292] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256090, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.255021] env[61545]: INFO nova.compute.manager [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Swapping old allocation on dict_keys(['7015027d-c4e1-4938-ac31-6e4672774d7e']) held by migration cba5117b-fb7a-4947-b2b6-06dabedaf661 for instance [ 962.266941] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52816978-80ec-a789-e413-400661568b21, 'name': SearchDatastore_Task, 'duration_secs': 0.040067} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.267816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.267816] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.267816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.268149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.268149] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.271325] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90a28113-4a57-47b0-ad11-23a4c7ecc51f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.284434] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.284434] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.284434] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-363ddb41-9366-4675-8eaf-937536ac7e60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.290647] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 962.290647] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ce960-fae2-4e08-788e-24f7ab40d0b1" [ 962.290647] env[61545]: _type = "Task" [ 962.290647] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.292211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.297474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a25b40-dcea-414e-bd46-9da5b2f410fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.300960] env[61545]: DEBUG nova.scheduler.client.report [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Overwriting current allocation {'allocations': {'7015027d-c4e1-4938-ac31-6e4672774d7e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 92}}, 'project_id': 'f56d2d605ffd4d098959105ab53d9803', 'user_id': 'de2ff39939bd440b8df0819c626fc2ca', 'consumer_generation': 1} on consumer dad53420-37f1-42ef-b0d3-e35c73b97417 {{(pid=61545) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 962.312069] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ce960-fae2-4e08-788e-24f7ab40d0b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.313389] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3c9017-0748-4681-bba5-46e79a4fe756 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.348880] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb37d3b-9dec-4063-89ca-34e7835b7b6e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.358389] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f845974-ee6f-4f93-a163-9ead3c8d5d43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.377018] env[61545]: DEBUG nova.compute.provider_tree [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.435456] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Successfully updated port: dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.453965] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.469885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.470109] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquired lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.470296] env[61545]: DEBUG nova.network.neutron [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.651759] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.651759] 
env[61545]: DEBUG nova.compute.manager [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Received event network-changed-f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 962.651759] env[61545]: DEBUG nova.compute.manager [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Refreshing instance network info cache due to event network-changed-f7745e62-9a91-4729-af18-5a9f49312659. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 962.651759] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Acquiring lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.651759] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Acquired lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.651759] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Refreshing network info cache for port f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.722806] env[61545]: DEBUG nova.compute.manager [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 962.723131] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.724495] env[61545]: DEBUG nova.network.neutron [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updated VIF entry in instance network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.724870] env[61545]: DEBUG nova.network.neutron [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.726854] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9022a197-d12a-4082-81b5-3fd94bf7971a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.737150] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.741120] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbfbddd6-e7f4-4033-866f-dbff82d73650 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.749837] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.752401] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 962.752401] env[61545]: value = "task-4256091" [ 962.752401] env[61545]: _type = "Task" [ 962.752401] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.766504] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4256091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.804871] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ce960-fae2-4e08-788e-24f7ab40d0b1, 'name': SearchDatastore_Task, 'duration_secs': 0.021721} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.804871] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4fc90ab-9475-4265-b688-7ea42f147a0a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.812309] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 962.812309] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae790d-5cfc-d0c4-117a-9cc2f36ccd16" [ 962.812309] env[61545]: _type = "Task" [ 962.812309] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.827373] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae790d-5cfc-d0c4-117a-9cc2f36ccd16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.881740] env[61545]: DEBUG nova.scheduler.client.report [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.938258] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.938425] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.939209] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.235561] env[61545]: DEBUG oslo_concurrency.lockutils [req-dfdc836a-9c77-43ca-b63e-4cb539d11926 req-31396dfd-1132-4bd2-8172-10444dff1110 service nova] Releasing lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.235561] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquired lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.235561] env[61545]: DEBUG nova.network.neutron [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.245270] env[61545]: DEBUG nova.compute.manager [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 963.248046] env[61545]: DEBUG nova.compute.manager [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] 
[instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing instance network info cache due to event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 963.248046] env[61545]: DEBUG oslo_concurrency.lockutils [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.248046] env[61545]: DEBUG oslo_concurrency.lockutils [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.248046] env[61545]: DEBUG nova.network.neutron [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.258158] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256090, 'name': ReconfigVM_Task, 'duration_secs': 0.596253} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.261676] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/73df6df0-ead6-49cd-8b0a-5e95acfc7e15.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.262982] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=61545) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 963.263490] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-3fc7abe3-cad7-4dd2-9f91-c669afeaf152 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.274422] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4256091, 'name': PowerOffVM_Task, 'duration_secs': 0.278999} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.276821] env[61545]: DEBUG nova.compute.manager [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Received event network-vif-plugged-dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 963.277084] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.277377] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.277557] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.277739] env[61545]: DEBUG nova.compute.manager [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] No waiting events found dispatching network-vif-plugged-dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 963.277911] env[61545]: WARNING nova.compute.manager [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Received unexpected event network-vif-plugged-dfbfdd5b-df4f-4326-b48a-69bd14494d5a for instance with vm_state building and task_state spawning. [ 963.278109] env[61545]: DEBUG nova.compute.manager [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Received event network-changed-dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 963.278335] env[61545]: DEBUG nova.compute.manager [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Refreshing instance network info cache due to event network-changed-dfbfdd5b-df4f-4326-b48a-69bd14494d5a. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 963.278696] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Acquiring lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.278887] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.278979] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.279805] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 963.279805] env[61545]: value = "task-4256094" [ 963.279805] env[61545]: _type = "Task" [ 963.279805] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.280098] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbc8c177-4b74-4285-91a6-7a41454f4b39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.294175] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256094, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.324989] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ae790d-5cfc-d0c4-117a-9cc2f36ccd16, 'name': SearchDatastore_Task, 'duration_secs': 0.018988} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.325381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.325521] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 578ce929-99fd-47ae-8275-e4ac9abe8d49/578ce929-99fd-47ae-8275-e4ac9abe8d49.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.325832] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dad6f36-f354-4a17-ada0-bb8ac944f0da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.334804] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 963.334804] env[61545]: value = "task-4256097" [ 963.334804] env[61545]: _type = "Task" [ 963.334804] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.346337] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256097, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.361734] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.361734] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.361734] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleting the datastore file [datastore2] d7e25ea6-7076-4ab2-aed6-fe5232c2665d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.362465] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb8812fe-f3f7-4bf7-8d32-0bba423e5de6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.373892] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for the task: (returnval){ [ 963.373892] env[61545]: value = "task-4256098" [ 963.373892] env[61545]: _type = "Task" [ 963.373892] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.385181] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4256098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.386885] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.387436] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 963.391094] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.936s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.391979] env[61545]: INFO nova.compute.claims [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.453644] env[61545]: DEBUG nova.network.neutron [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [{"id": "fc3b9500-79f7-4be8-a298-f3522507a716", "address": "fa:16:3e:fc:1e:9e", "network": {"id": "b5e946ac-d9a1-43e8-9b74-9e434084c7bb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.126", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5e1f4f8e2afb442e987b71a3579e05d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc3b9500-79", "ovs_interfaceid": "fc3b9500-79f7-4be8-a298-f3522507a716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.551928] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 964.605505] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Updated VIF entry in instance network info cache for port f7745e62-9a91-4729-af18-5a9f49312659. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.605941] env[61545]: DEBUG nova.network.neutron [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Updating instance_info_cache with network_info: [{"id": "f7745e62-9a91-4729-af18-5a9f49312659", "address": "fa:16:3e:76:4a:39", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7745e62-9a", "ovs_interfaceid": "f7745e62-9a91-4729-af18-5a9f49312659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.614403] env[61545]: DEBUG nova.compute.utils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 964.618687] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Releasing lock "refresh_cache-dad53420-37f1-42ef-b0d3-e35c73b97417" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.619188] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.630094] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 964.630094] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.631432] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad5ad056-493c-4970-8c94-b15f9c6c4afd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.641925] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256094, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.069119} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.651222] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=61545) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 964.652115] env[61545]: DEBUG oslo_vmware.api [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Task: {'id': task-4256098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288523} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.652565] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 964.652565] env[61545]: value = "task-4256099" [ 964.652565] env[61545]: _type = "Task" [ 964.652565] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.653218] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256097, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.654099] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d8a6a8-86a4-4cf9-886c-6778a2291f45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.657865] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.658107] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.658411] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.658731] env[61545]: INFO nova.compute.manager [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Took 1.94 seconds to destroy the instance on the hypervisor. [ 964.661252] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.661252] env[61545]: DEBUG nova.compute.manager [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 964.661252] env[61545]: DEBUG nova.network.neutron [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.697118] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/ephemeral_0.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.701308] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bf082d6-d0da-4195-84e4-4d8634a49c1a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.716551] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.725554] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 964.725554] env[61545]: value = "task-4256100" [ 964.725554] env[61545]: _type = "Task" [ 964.725554] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.737393] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256100, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.762761] env[61545]: DEBUG nova.policy [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a76971360f074d398d059dbcb9ada6ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae60c9c1b7804134b570d0384dc85ea5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 964.831042] env[61545]: DEBUG nova.network.neutron [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Updating instance_info_cache with network_info: [{"id": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "address": "fa:16:3e:b1:96:0a", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfbfdd5b-df", "ovs_interfaceid": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.038243] env[61545]: DEBUG nova.network.neutron [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": 
"nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.128196] env[61545]: DEBUG oslo_concurrency.lockutils [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] Releasing lock "refresh_cache-578ce929-99fd-47ae-8275-e4ac9abe8d49" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.128576] env[61545]: DEBUG nova.compute.manager [req-baa28d1e-745d-472c-b7b6-c0058c58d6d6 req-a9bf6c96-0d39-46a9-985a-150d77d238ec service nova] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Received event network-vif-deleted-14efafb4-97cd-4720-a2dd-36f9af0a8644 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 965.129420] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 965.132453] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256097, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.669897} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.133700] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 578ce929-99fd-47ae-8275-e4ac9abe8d49/578ce929-99fd-47ae-8275-e4ac9abe8d49.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.134096] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.134657] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2921ef8-6a31-4cce-98ae-c5a8fc9867f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.144583] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 965.144583] env[61545]: value = "task-4256101" [ 965.144583] env[61545]: _type = "Task" [ 965.144583] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.164605] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.188034] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256099, 'name': PowerOffVM_Task, 'duration_secs': 0.369253} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.188034] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.188034] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0ed6a45a-0c2d-43c8-94d3-0da3debac597',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1743994676',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.188705] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.188705] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.188705] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.188705] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.188816] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 
tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.188958] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.189114] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.189743] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.189919] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.190137] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.198722] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f94235c0-f9c1-4932-8253-c4eb7d6f80db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.222994] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 965.222994] env[61545]: value = "task-4256102" [ 965.222994] env[61545]: _type = "Task" [ 965.222994] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.245469] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256102, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.252312] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256100, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.334703] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.335131] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Instance network_info: |[{"id": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "address": "fa:16:3e:b1:96:0a", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfbfdd5b-df", "ovs_interfaceid": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.335555] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Acquired lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.335709] env[61545]: DEBUG nova.network.neutron [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Refreshing network info cache for port dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 965.337251] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:96:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfbfdd5b-df4f-4326-b48a-69bd14494d5a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.346530] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 
tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.350129] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.350663] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ccc183c-bcee-4baa-a243-f5c28d76f506 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.378408] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.378408] env[61545]: value = "task-4256103" [ 965.378408] env[61545]: _type = "Task" [ 965.378408] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.388488] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256103, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.545653] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Releasing lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.663762] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096154} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.664252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.665425] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2181b6-4a7e-4ebb-a385-48e5840d276d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.696453] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 578ce929-99fd-47ae-8275-e4ac9abe8d49/578ce929-99fd-47ae-8275-e4ac9abe8d49.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.700013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7d42120-8d7d-46bf-b46b-ff7e705dd0be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.734042] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 965.734042] env[61545]: value = "task-4256104" [ 965.734042] env[61545]: _type = "Task" [ 965.734042] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.740181] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256102, 'name': ReconfigVM_Task, 'duration_secs': 0.214382} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.752666] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c44db9-770f-41f7-bf5d-6d1d132ec1c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.764964] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256104, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.785649] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256100, 'name': ReconfigVM_Task, 'duration_secs': 0.549477} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.786624] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:48:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0ed6a45a-0c2d-43c8-94d3-0da3debac597',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1743994676',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.786857] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.787014] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.787273] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.787387] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.787541] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.787763] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.789031] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.789031] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] 
Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.789031] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.789031] env[61545]: DEBUG nova.virt.hardware [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.792588] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15/ephemeral_0.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 965.793393] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-461c1c49-d87a-4fce-b8af-a285f1431729 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.798806] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-006e62df-928c-4c77-be3d-e66f24bc649d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.802362] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 965.802362] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52592419-e0e9-31e5-457e-3a72c4b5d1b9" [ 965.802362] env[61545]: _type = "Task" [ 965.802362] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.804029] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 965.804029] env[61545]: value = "task-4256105" [ 965.804029] env[61545]: _type = "Task" [ 965.804029] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.823378] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52592419-e0e9-31e5-457e-3a72c4b5d1b9, 'name': SearchDatastore_Task, 'duration_secs': 0.017326} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.835386] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 965.835386] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256105, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.838997] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49742ff4-ee13-4b2c-995b-86d35807d017 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.870097] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 965.870097] env[61545]: value = "task-4256106" [ 965.870097] env[61545]: _type = "Task" [ 965.870097] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.874018] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 965.874018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd1bcdc-7963-421d-8a15-74fa3050f23f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.887609] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 965.888024] env[61545]: ERROR oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk due to incomplete transfer. 
[ 965.894025] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c3c7ce62-0709-4304-b81d-db86639fce42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.894025] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256106, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.899566] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256103, 'name': CreateVM_Task, 'duration_secs': 0.465114} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.901260] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.902516] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406e8e06-1014-4013-9d62-ebf711b64e23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.906053] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.908386] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.908386] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.908386] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af1d7213-8b87-4c91-94f0-72b93a21ad2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.910579] env[61545]: DEBUG oslo_vmware.rw_handles [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f70c-aa8c-08bb-d609-e57640164b5f/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 965.911528] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploaded image 289430b0-3fa0-4a1e-a8a8-6497038e41c8 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 965.913709] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 965.916749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e99a21b7-2c27-42d8-b326-f6faaaef9293 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.921122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab43861-5d7a-45b1-96ab-cf789d76b379 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.925120] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 965.925120] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbb823-38d9-05c5-c9df-54522961ab7b" [ 965.925120] env[61545]: _type = "Task" [ 965.925120] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.960944] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 965.960944] env[61545]: value = "task-4256107" [ 965.960944] env[61545]: _type = "Task" [ 965.960944] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.965201] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aace85cb-fdb9-45d5-b941-f83e66004bdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.981835] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbb823-38d9-05c5-c9df-54522961ab7b, 'name': SearchDatastore_Task, 'duration_secs': 0.020246} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.982756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.983180] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.983529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.983699] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.983944] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.984731] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c790193-5e5d-4ddc-8b5b-cfdba10ace58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.994681] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256107, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.996609] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebc2df6-4087-42a3-8198-d439bb7ba8b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.014279] env[61545]: DEBUG nova.compute.provider_tree [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.016941] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.019681] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.019681] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddf27fc9-255d-4975-bd1a-f6ba430ebc48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.025412] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 966.025412] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5250db25-93ba-2cf9-67c8-a8d626aa6de6" [ 966.025412] env[61545]: _type = "Task" [ 966.025412] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.035529] env[61545]: DEBUG nova.network.neutron [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updated VIF entry in instance network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.035529] env[61545]: DEBUG nova.network.neutron [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.043567] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5250db25-93ba-2cf9-67c8-a8d626aa6de6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.052994] env[61545]: DEBUG nova.compute.manager [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.054039] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9294a9-4a92-4410-8672-cf027c4898ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.144309] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 966.173272] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 966.173501] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.173632] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 966.173815] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.173964] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 966.174125] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 966.175170] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 966.175170] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 966.175170] env[61545]: DEBUG nova.virt.hardware [None 
req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 966.177280] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 966.177725] env[61545]: DEBUG nova.virt.hardware [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 966.178498] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a996d8c-93c9-4122-af22-a72d33872ad5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.188355] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e847820-26e4-4a2d-805d-f33ecbd12db4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.255048] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256104, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.322321] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256105, 'name': Rename_Task, 'duration_secs': 0.394792} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.322645] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.323037] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9897e224-3a97-4ac6-b265-a865b7674c99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.335720] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 966.335720] env[61545]: value = "task-4256108" [ 966.335720] env[61545]: _type = "Task" [ 966.335720] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.348860] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.385496] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256106, 'name': ReconfigVM_Task, 'duration_secs': 0.33905} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.387314] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 966.387314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dcee42-2992-47a3-8ffa-ddadacf31d3f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.410979] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.416615] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a20bb9b-d3cf-4f5a-a0e3-eb0d67f22d6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.435266] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 966.435266] env[61545]: value = "task-4256110" [ 966.435266] env[61545]: _type = "Task" [ 966.435266] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.447684] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.475866] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256107, 'name': Destroy_Task} progress is 33%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.519432] env[61545]: DEBUG nova.scheduler.client.report [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.539070] env[61545]: DEBUG oslo_concurrency.lockutils [req-7a166632-3a30-44e5-be97-1d979fefdce2 req-79eb8736-3fcf-4f23-a406-98667dd03ab1 service nova] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.539568] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5250db25-93ba-2cf9-67c8-a8d626aa6de6, 'name': SearchDatastore_Task, 'duration_secs': 0.018858} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.540477] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee83a89b-a459-4353-a46e-d8a06efd2ccd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.549496] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 966.549496] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288d253-02be-0a15-e2ff-b57e522b8ba9" [ 966.549496] env[61545]: _type = "Task" [ 966.549496] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.561527] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288d253-02be-0a15-e2ff-b57e522b8ba9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.583228] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Successfully created port: eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.603131] env[61545]: DEBUG nova.network.neutron [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Updated VIF entry in instance network info cache for port dfbfdd5b-df4f-4326-b48a-69bd14494d5a. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.603131] env[61545]: DEBUG nova.network.neutron [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Updating instance_info_cache with network_info: [{"id": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "address": "fa:16:3e:b1:96:0a", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfbfdd5b-df", "ovs_interfaceid": "dfbfdd5b-df4f-4326-b48a-69bd14494d5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.751050] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256104, 'name': ReconfigVM_Task, 'duration_secs': 0.637549} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.751373] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 578ce929-99fd-47ae-8275-e4ac9abe8d49/578ce929-99fd-47ae-8275-e4ac9abe8d49.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.752098] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c3c2361-2c38-4b87-a3e8-c15d5877ce22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.760424] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 966.760424] env[61545]: value = "task-4256111" [ 966.760424] env[61545]: _type = "Task" [ 966.760424] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.769829] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256111, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.847287] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256108, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.960395] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256110, 'name': ReconfigVM_Task, 'duration_secs': 0.462551} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.962904] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Reconfigured VM instance instance-00000032 to attach disk [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417/dad53420-37f1-42ef-b0d3-e35c73b97417.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.965795] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee62814-d145-43f3-9157-0f1c65c214ee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.998459] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256107, 'name': Destroy_Task, 'duration_secs': 0.750006} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.999361] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1cf7d0-3ebf-47b7-b4c6-496220236e62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.002499] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroyed the VM [ 967.002784] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 967.003096] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f2d7b20c-9737-477f-92a9-7ee621534197 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.028083] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.638s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.028821] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.033604] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.610s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.035620] env[61545]: INFO nova.compute.claims [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.039036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501fdef6-cb38-45f2-ad00-2332fa5c1107 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.041815] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 967.041815] env[61545]: value = "task-4256112" [ 967.041815] env[61545]: _type = "Task" [ 967.041815] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.065734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb503ce-8c5b-463d-b69b-e08b21b5c0a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.072768] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256112, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.080373] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.085484] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62dcd8e-c0ee-45fc-b5eb-4225460caeef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.088631] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4d5d887-ea85-4207-bcc6-c0673812fec0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.091100] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288d253-02be-0a15-e2ff-b57e522b8ba9, 'name': SearchDatastore_Task, 'duration_secs': 0.033095} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.094674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.094674] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5ba53915-ab57-493e-b2e1-7f3d1b3845ee/5ba53915-ab57-493e-b2e1-7f3d1b3845ee.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.095155] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f00b6dcc-e77c-4207-ad7e-97e223666457 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.102264] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Doing hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 967.102697] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 967.102697] env[61545]: value = "task-4256113" [ 967.102697] env[61545]: _type = "Task" [ 967.102697] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.103514] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-2a1f06c2-1a66-4e76-b5d6-a8d5cc06e793 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.110762] env[61545]: DEBUG oslo_concurrency.lockutils [req-aa4f73bd-3e96-4b10-9dde-91b259020e60 req-d00bd2b8-d755-4b36-a86d-408c00c53600 service nova] Releasing lock "refresh_cache-5ba53915-ab57-493e-b2e1-7f3d1b3845ee" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.114307] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 967.114307] env[61545]: value = "task-4256114" [ 967.114307] env[61545]: _type = "Task" [ 967.114307] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.125064] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256113, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.126229] env[61545]: DEBUG oslo_vmware.api [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 967.126229] env[61545]: value = "task-4256115" [ 967.126229] env[61545]: _type = "Task" [ 967.126229] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.137051] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.146309] env[61545]: DEBUG oslo_vmware.api [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256115, 'name': ResetVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.273933] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256111, 'name': Rename_Task, 'duration_secs': 0.364147} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.274552] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.274814] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5541229c-2fa7-46fa-91cf-115fe65b5991 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.284998] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 967.284998] env[61545]: value = "task-4256116" [ 967.284998] env[61545]: _type = "Task" [ 967.284998] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.301062] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.351273] env[61545]: DEBUG oslo_vmware.api [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256108, 'name': PowerOnVM_Task, 'duration_secs': 0.689938} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.351273] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.352143] env[61545]: INFO nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Took 13.13 seconds to spawn the instance on the hypervisor. [ 967.352143] env[61545]: DEBUG nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.353379] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f004a3-cabb-48c0-8c17-c30a4de69770 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.447215] env[61545]: DEBUG nova.compute.manager [req-aaf1acf5-ca3e-4f8a-b37f-4ca13b893491 req-fa1d8894-b3c4-47ae-869f-b9b5de4a8750 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Received event network-vif-deleted-7e56c9b7-f0cb-41e5-b513-077c74cba86c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 967.447215] env[61545]: INFO nova.compute.manager [req-aaf1acf5-ca3e-4f8a-b37f-4ca13b893491 req-fa1d8894-b3c4-47ae-869f-b9b5de4a8750 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Neutron deleted interface 7e56c9b7-f0cb-41e5-b513-077c74cba86c; detaching it from the instance and deleting it from the info cache [ 967.447215] env[61545]: DEBUG nova.network.neutron [req-aaf1acf5-ca3e-4f8a-b37f-4ca13b893491 req-fa1d8894-b3c4-47ae-869f-b9b5de4a8750 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.451224] env[61545]: DEBUG nova.network.neutron [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.548362] env[61545]: DEBUG nova.compute.utils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 967.553639] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 967.553639] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 967.564566] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256112, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.627390] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256113, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.636217] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256114, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.643235] env[61545]: DEBUG oslo_vmware.api [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256115, 'name': ResetVM_Task, 'duration_secs': 0.128625} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.643524] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Did hard reboot of VM {{(pid=61545) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 967.643710] env[61545]: DEBUG nova.compute.manager [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.644658] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe9b7e8-be1f-438c-ae70-7ede51fd9b89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.709973] env[61545]: DEBUG nova.policy [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f01e33a652314f70a08ae1a8087a54cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f4ccb92c3be47f18fd65a22a5a1ad94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 967.799702] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256116, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.883368] env[61545]: INFO nova.compute.manager [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Took 59.85 seconds to build instance. [ 967.949361] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7059884e-5c8b-48e3-9746-786b66b0a895 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.953989] env[61545]: INFO nova.compute.manager [-] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Took 3.29 seconds to deallocate network for instance. [ 967.963479] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d7c29d-8fd3-4c33-8d65-520961fcfd00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.009714] env[61545]: DEBUG nova.compute.manager [req-aaf1acf5-ca3e-4f8a-b37f-4ca13b893491 req-fa1d8894-b3c4-47ae-869f-b9b5de4a8750 service nova] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Detach interface failed, port_id=7e56c9b7-f0cb-41e5-b513-077c74cba86c, reason: Instance d7e25ea6-7076-4ab2-aed6-fe5232c2665d could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 968.051179] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.066672] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256112, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.121222] env[61545]: DEBUG oslo_vmware.api [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256113, 'name': PowerOnVM_Task, 'duration_secs': 0.819398} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.128160] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.141474] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.710909} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.143034] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5ba53915-ab57-493e-b2e1-7f3d1b3845ee/5ba53915-ab57-493e-b2e1-7f3d1b3845ee.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.143034] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.143034] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09437d64-aefc-42a5-a519-0d57c2ca6a2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.152388] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 968.152388] env[61545]: value = "task-4256117" [ 968.152388] env[61545]: _type = "Task" [ 968.152388] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.161949] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d4954c21-598c-471d-bd8b-0cdec99d21e0 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 6.409s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.170459] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256117, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.308411] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256116, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.386144] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a053134-30cb-4876-bb79-4a3863b96938 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.638s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.466081] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Successfully created port: 2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.477773] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.573223] env[61545]: DEBUG oslo_vmware.api [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256112, 'name': RemoveSnapshot_Task, 'duration_secs': 1.381908} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.574367] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 968.574367] env[61545]: INFO nova.compute.manager [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 18.60 seconds to snapshot the instance on the hypervisor. [ 968.671130] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081166} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.671467] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.672414] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7635e97-b2b9-40ef-9f63-8a01e0d018fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.707491] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 5ba53915-ab57-493e-b2e1-7f3d1b3845ee/5ba53915-ab57-493e-b2e1-7f3d1b3845ee.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.712587] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68c4e61b-d915-4ecd-83aa-03c360c1f39a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.737864] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 968.737864] env[61545]: value = "task-4256119" [ 968.737864] env[61545]: _type = "Task" [ 968.737864] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.747077] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256119, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.755025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "d0f42893-3332-4027-93df-bb46e3350485" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.755025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.784409] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49c49ac-c29e-4545-89dd-b2936db9e5bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.801768] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4775b36f-c689-4df6-8cee-f6f621deebbc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.806069] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256116, 'name': PowerOnVM_Task} progress is 87%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.839774] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9796ad1-444a-435d-9ad0-a2fe4eafeb40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.851288] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5d1211-28d7-4704-9060-389739abb5c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.867669] env[61545]: DEBUG nova.compute.provider_tree [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.069548] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.106414] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.106834] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.107107] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.108618] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.108618] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.108618] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.108618] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.108618] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.108977] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.109037] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.109755] env[61545]: DEBUG nova.virt.hardware [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.110714] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4781423b-b4b4-4842-b2c7-c5e2a557ba59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.124178] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e52e9d-2bc1-4f72-9f77-8015a4bcf572 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.151180] env[61545]: INFO nova.compute.manager [None req-c7c8c8fc-1ee9-4e35-a2ff-cfca39a56891 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance to original state: 'active' [ 969.156294] env[61545]: DEBUG nova.compute.manager [None req-300167a2-2dab-4340-ba22-bf72a7e246d8 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Found 2 images (rotation: 2) {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 969.253555] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256119, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.257262] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.264316] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Successfully updated port: eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.307364] env[61545]: DEBUG oslo_vmware.api [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256116, 'name': PowerOnVM_Task, 'duration_secs': 1.667393} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.307364] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 969.307364] env[61545]: INFO nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Took 12.81 seconds to spawn the instance on the hypervisor. [ 969.307364] env[61545]: DEBUG nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.307364] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5502e98a-cd33-4404-b658-0f11765b4e88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.374024] env[61545]: DEBUG nova.scheduler.client.report [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.751885] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256119, 'name': ReconfigVM_Task, 'duration_secs': 0.590622} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.752333] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 5ba53915-ab57-493e-b2e1-7f3d1b3845ee/5ba53915-ab57-493e-b2e1-7f3d1b3845ee.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.753131] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d96a335-60b0-46ac-94b8-125dcc9108a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.761121] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 969.761121] env[61545]: value = "task-4256120" [ 969.761121] env[61545]: _type = "Task" [ 969.761121] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.769143] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.769245] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.769386] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.780249] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256120, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.796392] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.832208] env[61545]: INFO nova.compute.manager [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Took 57.52 seconds to build instance. 
[ 969.846552] env[61545]: DEBUG nova.compute.manager [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Received event network-changed-b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 969.846901] env[61545]: DEBUG nova.compute.manager [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Refreshing instance network info cache due to event network-changed-b310c98a-9de2-40bc-a430-b4d1724a069b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 969.846970] env[61545]: DEBUG oslo_concurrency.lockutils [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] Acquiring lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.847586] env[61545]: DEBUG oslo_concurrency.lockutils [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] Acquired lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.848121] env[61545]: DEBUG nova.network.neutron [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Refreshing network info cache for port b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.876168] env[61545]: DEBUG nova.compute.manager [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Received event network-vif-plugged-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 969.876517] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Acquiring lock "d980f421-03b5-4b0e-b547-a33031356d55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.876618] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Lock "d980f421-03b5-4b0e-b547-a33031356d55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.876895] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Lock "d980f421-03b5-4b0e-b547-a33031356d55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.876960] env[61545]: DEBUG nova.compute.manager [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] No waiting events 
found dispatching network-vif-plugged-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 969.877210] env[61545]: WARNING nova.compute.manager [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Received unexpected event network-vif-plugged-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 for instance with vm_state building and task_state spawning. [ 969.877489] env[61545]: DEBUG nova.compute.manager [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Received event network-changed-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 969.877886] env[61545]: DEBUG nova.compute.manager [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Refreshing instance network info cache due to event network-changed-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 969.877947] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Acquiring lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.879487] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.879487] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.890752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.217s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.891521] env[61545]: DEBUG nova.objects.instance [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 969.898152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.898152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.275457] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256120, 'name': Rename_Task, 'duration_secs': 0.200683} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.275794] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.276682] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc60a41e-8841-4b0f-9114-0fd6465252e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.285687] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 970.285687] env[61545]: value = "task-4256121" [ 970.285687] env[61545]: _type = "Task" [ 970.285687] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.298800] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.335570] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.339942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e63d778d-9e94-4f97-8973-6e384d2fcfa5 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.676s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.389170] env[61545]: DEBUG nova.compute.utils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 970.391216] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 970.391440] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.405619] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 970.541653] env[61545]: DEBUG nova.policy [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '483a3570e5d8427aa281abd6624fcfb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0f97aab169448c5a0d956b1b33e1ac2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 970.670679] env[61545]: DEBUG nova.network.neutron [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updated VIF entry in instance network info cache for port b310c98a-9de2-40bc-a430-b4d1724a069b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.671211] env[61545]: DEBUG nova.network.neutron [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updating instance_info_cache with network_info: [{"id": "b310c98a-9de2-40bc-a430-b4d1724a069b", "address": "fa:16:3e:ee:16:8a", "network": {"id": "c24102bb-211e-48c8-bbd1-8f1a28f06f9e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1587394794-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f598bcded6824792b972dfec9fc0fa22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1cf14cf-4f9c-41af-90d0-62e363eb4fba", "external-id": "nsx-vlan-transportzone-521", "segmentation_id": 521, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb310c98a-9d", "ovs_interfaceid": "b310c98a-9de2-40bc-a430-b4d1724a069b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.800963] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256121, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.872386] env[61545]: DEBUG nova.network.neutron [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Updating instance_info_cache with network_info: [{"id": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "address": "fa:16:3e:5f:b3:02", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae1f8ca-5f", "ovs_interfaceid": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.901657] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.911653] env[61545]: DEBUG oslo_concurrency.lockutils [None req-493c82bc-2966-425e-8121-58e6f3eaffe5 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.918253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.060s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.919440] env[61545]: DEBUG nova.objects.instance [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lazy-loading 'resources' on Instance uuid 4b29ebc4-d913-447c-bc57-890953cf8d49 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.954603] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.174402] env[61545]: DEBUG oslo_concurrency.lockutils [req-a5e60cf1-fe0f-4639-b509-91c1b36a3ebe req-ae1f8f3d-df9e-4a48-bd18-5dd5e3372427 service nova] Releasing lock "refresh_cache-73df6df0-ead6-49cd-8b0a-5e95acfc7e15" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.300056] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256121, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.377739] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.377739] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Instance network_info: |[{"id": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "address": "fa:16:3e:5f:b3:02", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae1f8ca-5f", "ovs_interfaceid": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 971.377739] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Acquired lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.377739] env[61545]: DEBUG nova.network.neutron [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Refreshing network info cache for port eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.377739] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:b3:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eae1f8ca-5ff6-4f95-8ff5-9a4452601a17', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.386807] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 
tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 971.388227] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.389106] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3509275-e307-4eb3-abe9-1456c476a296 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.422713] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.422713] env[61545]: value = "task-4256123" [ 971.422713] env[61545]: _type = "Task" [ 971.422713] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.440296] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256123, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.615745] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Successfully updated port: 2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.736433] env[61545]: DEBUG nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.737903] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35ec331-fcd1-4627-8db0-97f2f091681d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.763399] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Successfully created port: 28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 971.798891] env[61545]: DEBUG oslo_vmware.api [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256121, 'name': PowerOnVM_Task, 'duration_secs': 1.028047} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.799189] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.799392] env[61545]: INFO nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Took 12.33 seconds to spawn the instance on the hypervisor. [ 971.799565] env[61545]: DEBUG nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.801681] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ed7002-8b5f-4587-939a-0f2601b6ca21 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.826369] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "dad53420-37f1-42ef-b0d3-e35c73b97417" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.826507] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.826739] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.827042] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.827339] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.829674] env[61545]: INFO nova.compute.manager [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Terminating instance [ 971.913253] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.933852] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256123, 'name': CreateVM_Task, 'duration_secs': 0.512064} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.934052] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.934804] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.935034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.935445] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 971.938334] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41e01afe-12b4-4ceb-8447-fabfacc0af5d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.944335] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 971.944335] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52948544-3bc3-a913-be00-e7c1ced9a2c2" [ 971.944335] env[61545]: _type = "Task" [ 971.944335] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.960263] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52948544-3bc3-a913-be00-e7c1ced9a2c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.962297] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.962550] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.962708] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.962892] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.963051] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.963211] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.963426] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.963586] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.963755] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.963918] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.964114] env[61545]: DEBUG nova.virt.hardware [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.965183] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f543f0-feb6-4d63-94cb-f3dc43f82ee8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.978948] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79bd348-cce7-4a85-9cae-d5e36c077e3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.070393] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabb42d1-6218-4b0d-a5d5-7732ea23378a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.078519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e2210f-6bfc-4c03-91ec-894ed81e9cda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.111842] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771d743e-d5f1-4828-8e45-28bb1de609b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.119618] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.119710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.119873] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.124742] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ff72d4-b71e-48e6-83db-f4d767847984 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.141339] env[61545]: DEBUG nova.compute.provider_tree [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.253339] env[61545]: INFO nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] instance snapshotting [ 972.253941] env[61545]: DEBUG nova.objects.instance [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.323823] env[61545]: INFO nova.compute.manager [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Took 50.33 seconds to build instance. [ 972.337342] env[61545]: DEBUG nova.network.neutron [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Updated VIF entry in instance network info cache for port eae1f8ca-5ff6-4f95-8ff5-9a4452601a17. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.337664] env[61545]: DEBUG nova.network.neutron [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Updating instance_info_cache with network_info: [{"id": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "address": "fa:16:3e:5f:b3:02", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae1f8ca-5f", "ovs_interfaceid": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.341575] env[61545]: DEBUG nova.compute.manager [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.341575] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.341575] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e2fc4e-f1e7-4fcc-821b-c44dcb33db9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.350822] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.351386] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2510fe0c-4320-4f48-8c9e-db74eefad5b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.359731] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 972.359731] env[61545]: value = "task-4256124" [ 972.359731] env[61545]: _type = "Task" [ 972.359731] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.369806] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.456557] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52948544-3bc3-a913-be00-e7c1ced9a2c2, 'name': SearchDatastore_Task, 'duration_secs': 0.017145} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.456915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.457177] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.457469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.457625] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.457830] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.458113] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75de0f90-862c-48b5-a837-a007ca531f88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.468145] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.468375] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.469163] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f94af875-c889-4f7a-a4ef-9f71e7a5bc96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.476839] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 972.476839] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52389c1a-fd24-b487-e027-0db18e38aef1" [ 972.476839] env[61545]: _type = "Task" [ 972.476839] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.487700] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52389c1a-fd24-b487-e027-0db18e38aef1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.567395] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 972.567683] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing instance network info cache due to event network-changed-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 972.567885] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Acquiring lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.567967] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Acquired lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.568293] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Refreshing network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.672008] env[61545]: ERROR nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] [req-c30a3d6c-6406-4549-a181-92513291864a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c30a3d6c-6406-4549-a181-92513291864a"}]} [ 972.691846] env[61545]: DEBUG nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 972.710583] env[61545]: DEBUG nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 972.710826] env[61545]: DEBUG nova.compute.provider_tree [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.714024] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 972.726987] env[61545]: DEBUG nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 972.750534] env[61545]: DEBUG nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 972.760325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7616f98-e6f5-4a88-871b-f979eba06ea6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.796433] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4df295-07a2-4432-a920-85fc477dc600 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.829665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-59d6fbcf-ec6b-4843-850d-9fb2aebd7c29 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.865s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.841673] env[61545]: DEBUG oslo_concurrency.lockutils [req-f9975ff0-e721-49a0-9904-5188a6106de1 req-17310893-a63f-4ffa-8e97-ee660e1deb35 service nova] Releasing lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.877549] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256124, 'name': PowerOffVM_Task, 'duration_secs': 0.312543} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.877807] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.877987] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.878266] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e867c63-9789-400d-91f8-192f70fb5083 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.959878] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.960340] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.960584] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleting the datastore file [datastore2] dad53420-37f1-42ef-b0d3-e35c73b97417 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.964722] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28b94b73-f098-4435-8957-e9d7d5927173 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.974144] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 972.974144] env[61545]: value = "task-4256127" [ 972.974144] env[61545]: _type = "Task" [ 972.974144] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.001580] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256127, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.002305] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52389c1a-fd24-b487-e027-0db18e38aef1, 'name': SearchDatastore_Task, 'duration_secs': 0.013468} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.008327] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc9a13f1-0dfe-451d-afaf-f5be44f404d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.016358] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 973.016358] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acccb0-0d33-f3cd-f0ae-d7b49db8ce18" [ 973.016358] env[61545]: _type = "Task" [ 973.016358] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.035064] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acccb0-0d33-f3cd-f0ae-d7b49db8ce18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.053879] env[61545]: DEBUG nova.network.neutron [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Updating instance_info_cache with network_info: [{"id": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "address": "fa:16:3e:4d:d5:bb", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2019c08e-1b", "ovs_interfaceid": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.054460] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 
tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.054769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.055018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.055949] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.057738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.060550] env[61545]: INFO nova.compute.manager [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Terminating instance [ 973.300091] env[61545]: DEBUG nova.compute.manager [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.301362] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3857bf01-2bf0-4da5-9728-1b3b4447901f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.310413] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 973.313662] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0c674fb0-8be4-4cb7-8e7d-11428a0f4393 
{{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.328026] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 973.328026] env[61545]: value = "task-4256128" [ 973.328026] env[61545]: _type = "Task" [ 973.328026] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.339957] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256128, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.369470] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af9ef53-c258-4fdd-a375-31fad0765773 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.378738] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dda58ac-80b9-4a97-9da8-a55bd7f797ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.413600] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updated VIF entry in instance network info cache for port 7ea1dae1-c4a7-423c-9d65-dbc15e4848b1. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 973.414043] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [{"id": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "address": "fa:16:3e:ca:a5:3c", "network": {"id": "4bb0b530-45da-4447-9e8c-35cdc4872914", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-399449310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f29228f7495747ca97b16aa485960e14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ea1dae1-c4", "ovs_interfaceid": "7ea1dae1-c4a7-423c-9d65-dbc15e4848b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.416384] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cbcdbf-7f2f-43ff-8088-4b8c6204fc0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.426427] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5616b85e-4a3e-4991-a437-f4a95de54198 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.443382] env[61545]: DEBUG nova.compute.provider_tree [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.490424] env[61545]: DEBUG oslo_vmware.api [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326418} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.490692] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.490874] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.491124] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.491375] env[61545]: INFO nova.compute.manager [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Took 1.15 seconds to destroy the instance on the hypervisor. [ 973.491678] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.491919] env[61545]: DEBUG nova.compute.manager [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.493301] env[61545]: DEBUG nova.network.neutron [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.527591] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acccb0-0d33-f3cd-f0ae-d7b49db8ce18, 'name': SearchDatastore_Task, 'duration_secs': 0.024684} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.527816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.528098] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d980f421-03b5-4b0e-b547-a33031356d55/d980f421-03b5-4b0e-b547-a33031356d55.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.528403] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37b0848f-de39-40b7-86e8-e84d9679efea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.536747] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 973.536747] env[61545]: value = "task-4256129" [ 973.536747] env[61545]: _type = "Task" [ 973.536747] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.546525] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256129, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.560439] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.560897] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Instance network_info: |[{"id": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "address": "fa:16:3e:4d:d5:bb", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2019c08e-1b", "ovs_interfaceid": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.562350] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:d5:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2019c08e-1b99-4d7b-96f0-32e559d30daf', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.572634] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Creating folder: Project (0f4ccb92c3be47f18fd65a22a5a1ad94). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.573773] env[61545]: DEBUG nova.compute.manager [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.574058] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.574404] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a5f1ef2-5058-4e24-a998-c1ef1882ab53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.577963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4170d12-bd04-458d-b4db-80ef7eabd2ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.591522] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.591522] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a9baf0b-2826-4f0a-81c9-c966b6ef7d83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.595131] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Created folder: Project (0f4ccb92c3be47f18fd65a22a5a1ad94) in parent group-v838542. [ 973.595453] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Creating folder: Instances. Parent ref: group-v838732. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.598974] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8155a9b4-58c6-429e-aebb-f02f798e14bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.602522] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 973.602522] env[61545]: value = "task-4256131" [ 973.602522] env[61545]: _type = "Task" [ 973.602522] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.611702] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Created folder: Instances in parent group-v838732. [ 973.612244] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.613607] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.613607] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479c68a0-460d-4584-ad92-023fa384e339 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.633967] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.640323] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.640323] env[61545]: value = "task-4256133" [ 973.640323] env[61545]: _type = "Task" [ 973.640323] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.650174] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256133, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.816044] env[61545]: INFO nova.compute.manager [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] instance snapshotting [ 973.823646] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfcaa8d-f19f-43b4-b036-4fa7b998b40e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.842035] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256128, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.859241] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716fe425-f582-4ccc-9f92-151f47c38e97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.916788] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Releasing lock "refresh_cache-8ab168cb-b0a9-403c-bdb5-b96c6d319baf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.917090] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Received event network-vif-plugged-2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 973.917379] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Acquiring lock "665db895-52ce-4e7c-9a78-86db5b695534-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.917848] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Lock "665db895-52ce-4e7c-9a78-86db5b695534-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.917848] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Lock "665db895-52ce-4e7c-9a78-86db5b695534-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.917995] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] No waiting events found dispatching network-vif-plugged-2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.918181] env[61545]: WARNING nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Received unexpected event network-vif-plugged-2019c08e-1b99-4d7b-96f0-32e559d30daf for instance with vm_state building and task_state spawning. 
[ 973.918352] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Received event network-changed-2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 973.918683] env[61545]: DEBUG nova.compute.manager [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Refreshing instance network info cache due to event network-changed-2019c08e-1b99-4d7b-96f0-32e559d30daf. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 973.919123] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Acquiring lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.919292] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Acquired lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.919534] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Refreshing network info cache for port 2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.946813] env[61545]: DEBUG nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.049695] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256129, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.119369] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256131, 'name': PowerOffVM_Task, 'duration_secs': 0.241442} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.119787] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.120103] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.120283] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59bd60d3-30b0-4d14-bee8-ae8c2d916a26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.134269] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Successfully updated port: 28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.158573] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256133, 'name': CreateVM_Task, 'duration_secs': 0.4893} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.158759] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.159601] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.159782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.160348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 974.160828] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e2b33a6-8677-4a9b-8af7-defc2ca7bdf9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.168544] env[61545]: DEBUG oslo_vmware.api [None 
req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 974.168544] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5204441c-35f1-2452-a053-10d1f18e8149" [ 974.168544] env[61545]: _type = "Task" [ 974.168544] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.178659] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5204441c-35f1-2452-a053-10d1f18e8149, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.203074] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.203284] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.203421] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleting the datastore file [datastore2] 8ab168cb-b0a9-403c-bdb5-b96c6d319baf {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.203976] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e42a8ed-9002-4d09-a6f7-e3bfeab2ae6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.216022] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 974.216022] env[61545]: value = "task-4256135" [ 974.216022] env[61545]: _type = "Task" [ 974.216022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.225576] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.344962] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256128, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.373056] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 974.373056] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0929a37d-c9f8-46e9-8b64-d32c82b60b90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.378497] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.379424] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.384784] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 974.384784] env[61545]: value = "task-4256136" [ 974.384784] env[61545]: _type = "Task" [ 974.384784] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.397677] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256136, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.452658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.534s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.455657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.423s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.456186] env[61545]: DEBUG nova.objects.instance [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lazy-loading 'resources' on Instance uuid 19aabdc5-8d2f-4adb-aea0-34ce4482677a {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.486363] env[61545]: INFO nova.scheduler.client.report [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Deleted allocations for instance 4b29ebc4-d913-447c-bc57-890953cf8d49 [ 974.551242] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686551} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.552375] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d980f421-03b5-4b0e-b547-a33031356d55/d980f421-03b5-4b0e-b547-a33031356d55.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.552750] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.553150] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58be2d42-4940-48bf-bc20-4026487891f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.563728] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 974.563728] env[61545]: value = "task-4256137" [ 974.563728] env[61545]: _type = "Task" [ 974.563728] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.577289] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.621610] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Received event network-vif-plugged-28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 974.621846] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Acquiring lock "fff833ad-55af-4702-859b-05f94cac18c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.622375] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Lock "fff833ad-55af-4702-859b-05f94cac18c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.622606] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Lock "fff833ad-55af-4702-859b-05f94cac18c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.622797] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] No waiting events found dispatching network-vif-plugged-28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 974.623061] env[61545]: WARNING nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Received unexpected event network-vif-plugged-28a12f95-4a10-42db-ac3a-4fe609682144 for instance with vm_state building and task_state spawning. [ 974.624168] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Received event network-changed-28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 974.624168] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Refreshing instance network info cache due to event network-changed-28a12f95-4a10-42db-ac3a-4fe609682144. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 974.624168] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Acquiring lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.624168] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Acquired lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.624168] env[61545]: DEBUG nova.network.neutron [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Refreshing network info cache for port 28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.637178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.681197] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5204441c-35f1-2452-a053-10d1f18e8149, 'name': SearchDatastore_Task, 'duration_secs': 0.033308} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.681511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.681763] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.682063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.682221] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.682427] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.682702] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88bd5bd8-d5b4-4b52-9835-488476e1abc2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.693543] env[61545]: DEBUG nova.network.neutron [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.699635] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.699922] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.700633] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94ac485-9556-43f5-bc40-9392a5b50c06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.707158] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 974.707158] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52135e65-7b4e-46b7-050e-52453ef8b279" [ 974.707158] env[61545]: _type = "Task" [ 974.707158] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.716781] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52135e65-7b4e-46b7-050e-52453ef8b279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.726841] env[61545]: DEBUG oslo_vmware.api [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27164} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.727119] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.727380] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.727562] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.727745] env[61545]: INFO nova.compute.manager [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Took 1.15 seconds to destroy the instance on the hypervisor. [ 974.728020] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.728223] env[61545]: DEBUG nova.compute.manager [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.728354] env[61545]: DEBUG nova.network.neutron [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.743747] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Updated VIF entry in instance network info cache for port 2019c08e-1b99-4d7b-96f0-32e559d30daf. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.744183] env[61545]: DEBUG nova.network.neutron [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Updating instance_info_cache with network_info: [{"id": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "address": "fa:16:3e:4d:d5:bb", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2019c08e-1b", "ovs_interfaceid": "2019c08e-1b99-4d7b-96f0-32e559d30daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.843078] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256128, 'name': CreateSnapshot_Task, 'duration_secs': 1.245154} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.843359] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 974.844476] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89937766-1671-48e0-87b3-c6759eeafa65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.887139] env[61545]: DEBUG nova.compute.utils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 974.899765] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256136, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.998811] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e8c63acb-aa27-4e1b-8592-f5ee89272c71 tempest-ServerRescueTestJSON-824602145 tempest-ServerRescueTestJSON-824602145-project-member] Lock "4b29ebc4-d913-447c-bc57-890953cf8d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.206s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.074811] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072973} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.077703] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.079104] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79bd8da-334b-41a9-8d08-5b1e7eda7e1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.108998] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] d980f421-03b5-4b0e-b547-a33031356d55/d980f421-03b5-4b0e-b547-a33031356d55.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.112096] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cad0555b-69fe-45ef-b381-517072df557f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.137377] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 975.137377] env[61545]: value = "task-4256139" [ 975.137377] env[61545]: _type = "Task" [ 975.137377] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.146339] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.191481] env[61545]: DEBUG nova.network.neutron [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.199683] env[61545]: INFO nova.compute.manager [-] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Took 1.71 seconds to deallocate network for instance. [ 975.219430] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52135e65-7b4e-46b7-050e-52453ef8b279, 'name': SearchDatastore_Task, 'duration_secs': 0.034593} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.224367] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfcde806-f0f1-4988-93ab-43338b34767c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.230925] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 975.230925] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5258c182-7c67-dd82-442b-5d8bdd111736" [ 975.230925] env[61545]: _type = "Task" [ 975.230925] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.241449] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5258c182-7c67-dd82-442b-5d8bdd111736, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.248247] env[61545]: DEBUG oslo_concurrency.lockutils [req-b3105c10-37a0-4966-8702-3f0c3d277519 req-12c74863-53e6-4581-9bfe-c706cb0a8e60 service nova] Releasing lock "refresh_cache-665db895-52ce-4e7c-9a78-86db5b695534" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.346278] env[61545]: DEBUG nova.network.neutron [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.370768] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 975.370768] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4dde6b74-dfe0-4bec-8b33-4ad2b00a2c8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.381453] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 975.381453] env[61545]: value = "task-4256140" [ 975.381453] env[61545]: _type = "Task" [ 975.381453] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.391769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.392934] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.405635] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256136, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.571890] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e09033-037b-4047-a368-161e966b0744 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.585315] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4914de7-77e7-4d34-a420-d79c8a641e07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.633829] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fc4c9e-f055-4ff6-84e1-627ff029b5ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.649977] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c53b0ba-2a0c-4aa3-a114-0aeb6d95cd30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.654256] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256139, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.667429] env[61545]: DEBUG nova.network.neutron [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.669699] env[61545]: DEBUG nova.compute.provider_tree [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.710149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.743123] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5258c182-7c67-dd82-442b-5d8bdd111736, 'name': SearchDatastore_Task, 'duration_secs': 0.013284} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.743413] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.743679] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 665db895-52ce-4e7c-9a78-86db5b695534/665db895-52ce-4e7c-9a78-86db5b695534.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.744108] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cab015d3-f89f-44b0-8480-4964d4195b96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.752264] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 975.752264] env[61545]: value = "task-4256141" [ 975.752264] env[61545]: _type = "Task" [ 975.752264] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.762028] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.849634] env[61545]: DEBUG oslo_concurrency.lockutils [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] Releasing lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.849965] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Received event network-vif-deleted-fc3b9500-79f7-4be8-a298-f3522507a716 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 975.850170] env[61545]: INFO nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Neutron deleted interface fc3b9500-79f7-4be8-a298-f3522507a716; detaching it from the instance and deleting it from the info cache [ 975.850411] env[61545]: DEBUG nova.network.neutron [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.851800] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.851891] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.893368] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.904897] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256136, 'name': CreateSnapshot_Task, 'duration_secs': 1.356697} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.905225] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 975.906065] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562f59f9-5e0b-4e8f-b9b7-eb2873a12022 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.150394] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256139, 'name': ReconfigVM_Task, 'duration_secs': 0.657019} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.150922] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Reconfigured VM instance instance-00000047 to attach disk [datastore2] d980f421-03b5-4b0e-b547-a33031356d55/d980f421-03b5-4b0e-b547-a33031356d55.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.151960] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c071baa-d949-4faf-8346-fc37c2074c2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.162431] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 976.162431] env[61545]: value = "task-4256142" [ 976.162431] env[61545]: _type = "Task" [ 976.162431] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.178239] env[61545]: DEBUG nova.scheduler.client.report [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.182604] env[61545]: INFO nova.compute.manager [-] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Took 1.45 seconds to deallocate network for instance. [ 976.182929] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256142, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.267248] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256141, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.357589] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65b7a38a-e242-441b-9caa-f185ad9d58f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.372973] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a59ba6d-42c9-4a85-9ea6-a395f49dc833 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.395858] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.428825] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 976.429466] env[61545]: DEBUG nova.compute.manager [req-73d35642-a728-4b73-abb4-8cccc18a2fe2 req-7a06fe9c-4bed-4553-ba2d-d417e7c6729b service nova] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Detach interface failed, port_id=fc3b9500-79f7-4be8-a298-f3522507a716, reason: Instance dad53420-37f1-42ef-b0d3-e35c73b97417 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 976.430159] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-75ffc595-3bbb-4eb3-908b-7717ed96fb10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.440732] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 976.440732] env[61545]: value = "task-4256143" [ 976.440732] env[61545]: _type = "Task" [ 976.440732] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.452759] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.479037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.479037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.479037] env[61545]: INFO nova.compute.manager [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Attaching volume 9a1dda2e-c1d4-43e7-be45-813fb72b01c9 to /dev/sdb [ 976.524205] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fc187c-7b00-4089-a621-c41560079a02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.534138] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a841a05d-fef0-41c9-bcbc-8b2e6d44c066 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.553360] env[61545]: DEBUG nova.virt.block_device [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Updating existing volume attachment record: c1a56183-0b12-4816-ae64-61855a4190d0 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 976.620892] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.677483] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256142, 'name': Rename_Task, 'duration_secs': 0.35512} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.681754] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.682111] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e30a23da-a0c1-4d8e-b5ec-af271e96b867 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.684735] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.688039] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.770s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.688595] env[61545]: DEBUG nova.objects.instance [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lazy-loading 'resources' on Instance uuid 9b62358e-c834-461c-9954-49f513b0f4ac {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.691032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.696147] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 976.696147] env[61545]: value = "task-4256144" [ 976.696147] env[61545]: _type = "Task" [ 976.696147] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.710505] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256144, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.717848] env[61545]: INFO nova.scheduler.client.report [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Deleted allocations for instance 19aabdc5-8d2f-4adb-aea0-34ce4482677a [ 976.767634] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765016} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.767930] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 665db895-52ce-4e7c-9a78-86db5b695534/665db895-52ce-4e7c-9a78-86db5b695534.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.768185] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.768561] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-736d18bb-7ae9-43ec-b57c-b8367f37957b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.776407] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 976.776407] env[61545]: value = "task-4256145" [ 976.776407] env[61545]: _type = "Task" [ 976.776407] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.787423] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256145, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.793628] env[61545]: DEBUG nova.compute.manager [req-e311e52f-6891-48f0-a7fd-70262c2044f5 req-6fd5e8c7-a264-48de-812e-daa9ba6c8b57 service nova] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Received event network-vif-deleted-7ea1dae1-c4a7-423c-9d65-dbc15e4848b1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 976.886125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.886468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.901754] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.957151] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.998755] env[61545]: DEBUG nova.network.neutron [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Updating instance_info_cache with network_info: [{"id": "28a12f95-4a10-42db-ac3a-4fe609682144", "address": "fa:16:3e:df:76:4d", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a12f95-4a", "ovs_interfaceid": "28a12f95-4a10-42db-ac3a-4fe609682144", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.216207] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256144, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.232308] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d5f9b60b-8f7b-420b-94d3-2571363969c7 tempest-ServerShowV257Test-1669330244 tempest-ServerShowV257Test-1669330244-project-member] Lock "19aabdc5-8d2f-4adb-aea0-34ce4482677a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.491s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.297130] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135038} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.300854] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.302660] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09954a7-4502-4b33-82ad-18ad6ed15640 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.334038] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 665db895-52ce-4e7c-9a78-86db5b695534/665db895-52ce-4e7c-9a78-86db5b695534.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.337816] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7690c4a3-994f-46fc-b3d3-413e53ba3c0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.361749] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 977.361749] env[61545]: value = "task-4256150" [ 977.361749] env[61545]: _type = "Task" [ 977.361749] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.376543] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256150, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.390468] env[61545]: DEBUG nova.compute.utils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 977.405387] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.452202] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.502708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "refresh_cache-fff833ad-55af-4702-859b-05f94cac18c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.503202] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance network_info: |[{"id": "28a12f95-4a10-42db-ac3a-4fe609682144", "address": "fa:16:3e:df:76:4d", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28a12f95-4a", "ovs_interfaceid": "28a12f95-4a10-42db-ac3a-4fe609682144", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 977.504578] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:76:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '557aba95-8968-407a-bac2-2fae66f7c8e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28a12f95-4a10-42db-ac3a-4fe609682144', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.515471] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.519067] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.520297] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53a58fa4-8a90-4fbb-a0c3-5a45ca3bfa54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.549336] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.549336] env[61545]: value = "task-4256151" [ 977.549336] env[61545]: _type = "Task" [ 977.549336] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.560523] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256151, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.714202] env[61545]: DEBUG oslo_vmware.api [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256144, 'name': PowerOnVM_Task, 'duration_secs': 0.89759} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.714202] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.714439] env[61545]: INFO nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Took 11.57 seconds to spawn the instance on the hypervisor. 
[ 977.714583] env[61545]: DEBUG nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 977.715434] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cf505d-407b-40dd-9a68-c0b6da3199dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.797756] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896a9766-655c-4ed1-a7af-6d9ce266b548 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.808757] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa2d635-690f-4198-b67f-7e63684f08b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.852013] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7af561f-181c-477d-bb5d-0faa71c1c826 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.861974] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18ce0bc-3a2f-45d7-aaae-2f41574a1aa7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.882577] env[61545]: DEBUG nova.compute.provider_tree [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.888619] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.901423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.902591] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.955557] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.064447] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256151, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.244194] env[61545]: INFO nova.compute.manager [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Took 39.87 seconds to build instance. [ 978.379447] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256150, 'name': ReconfigVM_Task, 'duration_secs': 0.604419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.379749] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 665db895-52ce-4e7c-9a78-86db5b695534/665db895-52ce-4e7c-9a78-86db5b695534.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.380439] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d39f336-e819-458a-be24-0cc7a6471371 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.387200] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 978.387200] env[61545]: value = "task-4256152" [ 978.387200] env[61545]: _type = "Task" [ 978.387200] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.403446] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256140, 'name': CloneVM_Task, 'duration_secs': 2.702449} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.406755] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Created linked-clone VM from snapshot [ 978.407094] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256152, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.407967] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48153863-fbc2-4684-900c-98803d5ef90f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.416875] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploading image 11264cae-fabe-40dd-a3f5-f9c69800f45c {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 978.426574] env[61545]: DEBUG nova.scheduler.client.report [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 978.426905] env[61545]: DEBUG nova.compute.provider_tree [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 95 to 96 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 978.427233] env[61545]: DEBUG nova.compute.provider_tree [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.457365] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 
tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.459675] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 978.459675] env[61545]: value = "vm-838736" [ 978.459675] env[61545]: _type = "VirtualMachine" [ 978.459675] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 978.462676] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9fb7a9dd-c22d-46f8-838f-a5267999ea52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.471595] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease: (returnval){ [ 978.471595] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cafba3-eb5e-b134-7128-6452f23cf228" [ 978.471595] env[61545]: _type = "HttpNfcLease" [ 978.471595] env[61545]: } obtained for exporting VM: (result){ [ 978.471595] env[61545]: value = "vm-838736" [ 978.471595] env[61545]: _type = "VirtualMachine" [ 978.471595] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 978.471917] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the lease: (returnval){ [ 978.471917] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cafba3-eb5e-b134-7128-6452f23cf228" [ 978.471917] env[61545]: _type = "HttpNfcLease" [ 978.471917] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 978.481965] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.481965] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cafba3-eb5e-b134-7128-6452f23cf228" [ 978.481965] env[61545]: _type = "HttpNfcLease" [ 978.481965] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.567047] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256151, 'name': CreateVM_Task, 'duration_secs': 0.792825} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.567047] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 978.567751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.568427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.568843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 978.569254] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0222653f-33a1-449c-84bf-b0defd4d48fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.575796] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 978.575796] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f8d80-9d16-c957-caad-10283213f9bb" [ 978.575796] env[61545]: _type = "Task" [ 978.575796] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.590993] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f8d80-9d16-c957-caad-10283213f9bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.706844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Acquiring lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.707049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Acquired lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.707234] env[61545]: DEBUG nova.network.neutron [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.746508] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ffb2bada-2ab3-4a1d-838d-57b8c60dec89 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.809s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.899086] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256152, 'name': Rename_Task, 'duration_secs': 0.268744} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.899086] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.899086] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e3366e0-c103-48da-89dd-bf3c883c1f82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.906668] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 978.906668] env[61545]: value = "task-4256155" [ 978.906668] env[61545]: _type = "Task" [ 978.906668] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.917715] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256155, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.934454] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.245s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.936094] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.482s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.936318] env[61545]: DEBUG nova.objects.instance [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 978.954874] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256143, 'name': CloneVM_Task, 'duration_secs': 2.450941} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.956475] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Created linked-clone VM from snapshot [ 978.958813] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7870ead-4c82-4924-88bc-8f931c67810e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.969546] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Uploading image 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 978.981670] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.981670] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cafba3-eb5e-b134-7128-6452f23cf228" [ 978.981670] env[61545]: _type = "HttpNfcLease" [ 978.981670] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 978.982039] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 978.982039] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cafba3-eb5e-b134-7128-6452f23cf228" [ 978.982039] env[61545]: _type = "HttpNfcLease" [ 978.982039] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 978.983199] env[61545]: INFO nova.scheduler.client.report [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted allocations for instance 9b62358e-c834-461c-9954-49f513b0f4ac [ 978.984852] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb263f7b-0302-43f0-960c-2e47301eae36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.992978] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.993254] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.993493] env[61545]: INFO nova.compute.manager [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Attaching volume d618a534-2f80-4250-a05c-4b8bb6a7e98a to /dev/sdb [ 979.003142] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 979.003330] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 979.009467] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 979.009467] env[61545]: value = "vm-838738" [ 979.009467] env[61545]: _type = "VirtualMachine" [ 979.009467] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 979.010121] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0c1e2eb8-e62a-48dd-a3b0-9d85d66297c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.069370] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease: (returnval){ [ 979.069370] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1fe92-68ac-4756-5ecd-0f68f31e0585" [ 979.069370] env[61545]: _type = "HttpNfcLease" [ 979.069370] env[61545]: } obtained for exporting VM: (result){ [ 979.069370] env[61545]: value = "vm-838738" [ 979.069370] env[61545]: _type = "VirtualMachine" [ 979.069370] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 979.069652] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the lease: (returnval){ [ 979.069652] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1fe92-68ac-4756-5ecd-0f68f31e0585" [ 979.069652] env[61545]: _type = "HttpNfcLease" [ 979.069652] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 979.070426] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d0c2f-3f52-4e85-ba3c-1a0b5e8161ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.086599] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcdadf0-369a-4b5b-ab6f-187ce2854554 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.094254] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.094254] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1fe92-68ac-4756-5ecd-0f68f31e0585" [ 979.094254] env[61545]: _type = "HttpNfcLease" [ 979.094254] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 979.094254] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 979.094254] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b1fe92-68ac-4756-5ecd-0f68f31e0585" [ 979.094254] env[61545]: _type = "HttpNfcLease" [ 979.094254] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 979.094254] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529f8d80-9d16-c957-caad-10283213f9bb, 'name': SearchDatastore_Task, 'duration_secs': 0.011907} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.095122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94f4187-f84e-4591-9e42-74f3d7fb90f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.097778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.098018] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.098337] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.098473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.098653] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.098934] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d92b414-860a-46db-bcb4-7dad352ab259 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.106905] env[61545]: DEBUG nova.virt.block_device [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating existing volume attachment record: b954f466-96f3-4211-8946-925dbb21971b {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 979.113578] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk from lease info. 
{{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 979.113780] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 979.116051] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.116298] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.118083] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3374d6-fba0-4806-8248-9123c0cc2b11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.183630] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-65b154fe-58c6-4ac9-96cb-8a82f704887e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.187184] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 979.187184] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f42e7-5216-9379-f6a7-12b55debc804" [ 979.187184] env[61545]: _type = "Task" [ 979.187184] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.197411] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f42e7-5216-9379-f6a7-12b55debc804, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.231558] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cfdc12e0-b37c-40d0-b900-98b5df8d05e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.419203] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256155, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.501125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-93c59ed0-d893-4e4f-b3ef-e7bdb6bcf896 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "9b62358e-c834-461c-9954-49f513b0f4ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.324s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.663730] env[61545]: DEBUG nova.network.neutron [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Updating instance_info_cache with network_info: [{"id": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "address": "fa:16:3e:5f:b3:02", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae1f8ca-5f", "ovs_interfaceid": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.702307] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f42e7-5216-9379-f6a7-12b55debc804, 'name': SearchDatastore_Task, 'duration_secs': 0.011971} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.703718] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d95f2a0-7199-41d9-a349-2d8f20a05720 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.712800] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 979.712800] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527719c3-c99f-468e-ae3b-28f128f3b003" [ 979.712800] env[61545]: _type = "Task" [ 979.712800] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.724849] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527719c3-c99f-468e-ae3b-28f128f3b003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.920783] env[61545]: DEBUG oslo_vmware.api [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256155, 'name': PowerOnVM_Task, 'duration_secs': 0.740359} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.922018] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.922459] env[61545]: INFO nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Took 10.85 seconds to spawn the instance on the hypervisor. [ 979.923068] env[61545]: DEBUG nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.924290] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48ce9eb-62e0-4b2e-806f-6731a9422b98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.949511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e004db83-ec31-422a-a1e8-f14984e45104 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.952350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.474s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.953183] env[61545]: DEBUG nova.objects.instance [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lazy-loading 'resources' on Instance uuid d7e25ea6-7076-4ab2-aed6-fe5232c2665d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.079807] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.080330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.081028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.081586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.082134] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.085102] env[61545]: INFO nova.compute.manager [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Terminating instance [ 980.167221] env[61545]: DEBUG oslo_concurrency.lockutils [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Releasing lock "refresh_cache-d980f421-03b5-4b0e-b547-a33031356d55" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.167885] env[61545]: DEBUG nova.compute.manager [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Inject network info {{(pid=61545) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 980.168725] env[61545]: DEBUG nova.compute.manager [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] network_info to inject: 
|[{"id": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "address": "fa:16:3e:5f:b3:02", "network": {"id": "97a1c04b-c7f7-4ecc-b8e4-8bb8eab2935f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-160888326-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae60c9c1b7804134b570d0384dc85ea5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeae1f8ca-5f", "ovs_interfaceid": "eae1f8ca-5ff6-4f95-8ff5-9a4452601a17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 980.174444] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Reconfiguring VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 980.175792] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c22c593b-6b7d-4bc6-af8b-714d4bb8842f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.199276] env[61545]: DEBUG oslo_vmware.api [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Waiting for the task: (returnval){ [ 980.199276] env[61545]: value = "task-4256161" [ 980.199276] env[61545]: _type = "Task" [ 980.199276] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.209952] env[61545]: DEBUG oslo_vmware.api [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Task: {'id': task-4256161, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.225560] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527719c3-c99f-468e-ae3b-28f128f3b003, 'name': SearchDatastore_Task, 'duration_secs': 0.014215} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.225988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.226613] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fff833ad-55af-4702-859b-05f94cac18c8/fff833ad-55af-4702-859b-05f94cac18c8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.227185] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fb13d8f-fa46-42ab-903b-e34df3dd88ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.238652] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 980.238652] env[61545]: value = "task-4256162" [ 980.238652] env[61545]: _type = "Task" [ 980.238652] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.251092] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.454830] env[61545]: INFO nova.compute.manager [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Took 41.01 seconds to build instance. [ 980.590267] env[61545]: DEBUG nova.compute.manager [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 980.590533] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.591582] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cdf2a0-b077-43b3-8849-fb8c22faf28a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.606451] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.606451] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-202e7f3f-4dd4-42d6-9b39-56295abd2dc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.616764] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 980.616764] env[61545]: value = "task-4256163" [ 980.616764] env[61545]: _type = "Task" [ 980.616764] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.630236] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.714511] env[61545]: DEBUG oslo_vmware.api [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] Task: {'id': task-4256161, 'name': ReconfigVM_Task, 'duration_secs': 0.202101} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.714995] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-74672fa4-db8a-477b-be58-74c87fb0d733 tempest-ServersAdminTestJSON-824984430 tempest-ServersAdminTestJSON-824984430-project-admin] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Reconfigured VM instance to set the machine id {{(pid=61545) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 980.753444] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256162, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.931378] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20142a3d-13ae-4570-92e4-9a5ce6aebfc9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.943441] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af10291-aa25-4e83-8198-1cdf7d7070c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.978196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38fe0b8d-f883-400b-9ee5-52c178c8af17 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.349s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.979170] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b520892b-82bf-4027-9730-d4ff79b7dcdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.988460] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8898fe7a-ef8a-4211-bceb-20910de058d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.006050] env[61545]: DEBUG nova.compute.provider_tree [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.126927] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.254070] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644785} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.254474] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fff833ad-55af-4702-859b-05f94cac18c8/fff833ad-55af-4702-859b-05f94cac18c8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.254798] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.255481] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-730ae293-0b65-4802-8719-159a03072e1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.264448] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 981.264448] env[61545]: value = "task-4256164" [ 981.264448] env[61545]: _type = "Task" [ 981.264448] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.276152] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256164, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.510233] env[61545]: DEBUG nova.scheduler.client.report [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.630571] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256163, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.632296] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 981.632662] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838740', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'name': 'volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5ba53915-ab57-493e-b2e1-7f3d1b3845ee', 'attached_at': '', 'detached_at': '', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'serial': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 981.633745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc37624-ef0f-44ed-9978-2460e2ad4a8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.654110] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ba888a-c55d-41bf-bc43-7ae6b1e51502 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.686271] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9/volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.686812] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98f8aac2-176c-4467-8b28-af423c5ce101 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.707901] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 981.707901] env[61545]: value = "task-4256166" [ 981.707901] env[61545]: _type = "Task" [ 981.707901] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.717583] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.774673] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256164, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081049} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.774976] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.775838] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1da2ec6-9f5e-4fce-8244-6f50e95cde6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.804444] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] fff833ad-55af-4702-859b-05f94cac18c8/fff833ad-55af-4702-859b-05f94cac18c8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.804814] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6db2adf5-4a22-48af-ab73-905fc8cfac6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.826881] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 981.826881] env[61545]: value = "task-4256167" [ 981.826881] env[61545]: _type = "Task" [ 981.826881] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.837280] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256167, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.017677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.020483] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.224s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.022615] env[61545]: INFO nova.compute.claims [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.049926] env[61545]: INFO nova.scheduler.client.report [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Deleted allocations for instance d7e25ea6-7076-4ab2-aed6-fe5232c2665d [ 982.128682] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256163, 'name': PowerOffVM_Task, 'duration_secs': 1.086621} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.128953] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.129142] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.129406] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5cf41a6f-7c13-4492-a9a6-c5ab2c9dcc29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.207298] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.207611] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.207877] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleting the datastore file [datastore2] 62301196-fb8a-45fe-9193-0ad8f7126ab5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.208310] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8be04a6e-16c3-43ca-8294-3fed5609959d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.221228] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.222917] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for the task: (returnval){ [ 982.222917] env[61545]: value = "task-4256169" [ 982.222917] env[61545]: _type = "Task" [ 982.222917] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.232330] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.338563] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256167, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.562055] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a1d7ad4-d6da-4a20-86d8-c47264d999e0 tempest-VolumesAdminNegativeTest-2067435148 tempest-VolumesAdminNegativeTest-2067435148-project-member] Lock "d7e25ea6-7076-4ab2-aed6-fe5232c2665d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.352s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.718921] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256166, 'name': ReconfigVM_Task, 'duration_secs': 0.593535} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.719242] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfigured VM instance instance-00000046 to attach disk [datastore1] volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9/volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.724093] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78263eb8-a548-4227-8ea4-637ee8f6c4a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.746652] env[61545]: DEBUG oslo_vmware.api [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Task: {'id': task-4256169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311339} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.747481] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.747481] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.747481] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.747738] env[61545]: INFO nova.compute.manager [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Took 2.16 seconds to destroy the instance on the hypervisor. [ 982.748387] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.749907] env[61545]: DEBUG nova.compute.manager [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.750064] env[61545]: DEBUG nova.network.neutron [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.752242] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 982.752242] env[61545]: value = "task-4256170" [ 982.752242] env[61545]: _type = "Task" [ 982.752242] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.763816] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256170, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.839508] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256167, 'name': ReconfigVM_Task, 'duration_secs': 0.560063} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.839901] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Reconfigured VM instance instance-00000049 to attach disk [datastore2] fff833ad-55af-4702-859b-05f94cac18c8/fff833ad-55af-4702-859b-05f94cac18c8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.840585] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2ae2093-5a97-4b79-82f4-821757f2c24c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.848894] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 982.848894] env[61545]: value = "task-4256171" [ 982.848894] env[61545]: _type = "Task" [ 982.848894] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.858068] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256171, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.198411] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.198776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.266040] env[61545]: DEBUG oslo_vmware.api [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256170, 'name': ReconfigVM_Task, 'duration_secs': 0.212721} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.266482] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838740', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'name': 'volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5ba53915-ab57-493e-b2e1-7f3d1b3845ee', 'attached_at': '', 'detached_at': '', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'serial': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 983.362044] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256171, 'name': Rename_Task, 'duration_secs': 0.227812} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.362336] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.362597] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10c9ec55-83a6-4be6-898f-0b8c62c82043 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.372068] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 983.372068] env[61545]: value = "task-4256172" [ 983.372068] env[61545]: _type = "Task" [ 983.372068] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.382685] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256172, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.536079] env[61545]: DEBUG nova.network.neutron [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.552423] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2754540-6b49-46bc-a72a-7fc46e89e5ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.561954] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552613a3-cd82-4efc-96d8-826681cd23d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.599102] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8251c6b7-aca7-48e8-9a48-84d93761f27d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.611404] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8749446b-0020-4b07-a5ef-0bce84e42989 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.629698] env[61545]: DEBUG nova.compute.provider_tree [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 983.691174] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 983.691174] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838743', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'name': 'volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b1277c3b-cd7b-43be-9eff-640145dde5e5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'serial': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 983.691972] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2286ca70-7d13-4865-a5e0-8e3e19ba283c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.709259] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 983.713304] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3972f6f-8a9a-4cad-8f9e-7f5a6f1869cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.741289] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a/volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.741822] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07c9cd17-55c7-46d7-aee7-2e32576c11cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.764232] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 983.764232] env[61545]: value = "task-4256173" [ 983.764232] env[61545]: _type = "Task" [ 983.764232] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.776675] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256173, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.883567] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256172, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.040258] env[61545]: INFO nova.compute.manager [-] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Took 1.29 seconds to deallocate network for instance. [ 984.162445] env[61545]: ERROR nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [req-6d0b23f2-c57d-4eb1-801d-1a245eff3479] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6d0b23f2-c57d-4eb1-801d-1a245eff3479"}]} [ 984.182987] env[61545]: DEBUG nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 984.200146] env[61545]: DEBUG nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 984.200404] env[61545]: DEBUG nova.compute.provider_tree [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 243, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.217078] env[61545]: DEBUG 
nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 984.240299] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.242498] env[61545]: DEBUG nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 984.277432] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256173, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.320450] env[61545]: DEBUG nova.objects.instance [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'flavor' on Instance uuid 5ba53915-ab57-493e-b2e1-7f3d1b3845ee {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.388015] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.548858] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.689020] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893e7dbd-1ae8-4c05-a5ab-c3bd0ba0d2d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.700092] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c01dfa-0517-4bee-a4d1-fcb21ca6244e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.735169] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac060b22-68f0-43cc-80c1-02a0cbc97bdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.744799] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3e8f07-8f38-416d-889e-d46b898dc72f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.766421] env[61545]: DEBUG nova.compute.provider_tree [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.782115] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256173, 'name': ReconfigVM_Task, 'duration_secs': 0.718631} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.783496] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfigured VM instance instance-0000003d to attach disk [datastore1] volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a/volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.789783] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01a9bba0-f970-4c22-974c-aaf2ea99bbe7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.808073] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 984.808073] env[61545]: value = "task-4256174" [ 984.808073] env[61545]: _type = "Task" [ 984.808073] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.819734] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256174, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.826911] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a31e2d2f-f40f-4675-a1ec-764259cd4b8d tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.349s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.888229] env[61545]: DEBUG oslo_vmware.api [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256172, 'name': PowerOnVM_Task, 'duration_secs': 1.448612} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.888595] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.889024] env[61545]: INFO nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Took 12.98 seconds to spawn the instance on the hypervisor. 
[ 984.889113] env[61545]: DEBUG nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.890280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf98b5c-ade9-48c9-b161-ca6d86392f56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.306834] env[61545]: DEBUG nova.scheduler.client.report [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 985.307211] env[61545]: DEBUG nova.compute.provider_tree [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 97 to 98 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 985.307525] env[61545]: DEBUG nova.compute.provider_tree [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.321913] env[61545]: DEBUG oslo_vmware.api [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256174, 'name': ReconfigVM_Task, 'duration_secs': 0.231835} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.322330] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838743', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'name': 'volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b1277c3b-cd7b-43be-9eff-640145dde5e5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'serial': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 985.413894] env[61545]: INFO nova.compute.manager [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Took 37.00 seconds to build instance. [ 985.817235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.797s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.817929] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 985.821299] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.867s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.822880] env[61545]: INFO nova.compute.claims [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.916065] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6d1cdfd-af99-4177-bbce-d5488be857d5 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.525s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.328229] env[61545]: DEBUG nova.compute.utils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 986.329811] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 986.330038] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.367250] env[61545]: DEBUG nova.objects.instance [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid b1277c3b-cd7b-43be-9eff-640145dde5e5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.444942] env[61545]: DEBUG nova.policy [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82de8ada56cd46319fe4c7ecd4957abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da39b1ee6df640b89a9dab58e3380397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.771976] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Successfully created port: aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.836770] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 986.877831] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34246e6b-c4ef-44c3-a179-d0954abf9e83 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.884s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.249331] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761a6a53-d2a8-4b6e-890f-5ab158218224 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.258081] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1cc53b-e870-46d5-9391-4282e24fe560 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.289125] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c460b0ff-bb3e-458b-baf6-6e4b6bfb6e3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.297545] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e825c8d8-302e-4c0c-98a1-0b7e5419a816 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.317262] env[61545]: DEBUG nova.compute.provider_tree [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.821399] env[61545]: DEBUG nova.scheduler.client.report [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.853631] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 988.328527] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.329193] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 988.333421] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.623s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.333730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.337505] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.646s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.337738] env[61545]: DEBUG nova.objects.instance [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lazy-loading 'resources' on Instance uuid 8ab168cb-b0a9-403c-bdb5-b96c6d319baf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.370082] env[61545]: INFO nova.scheduler.client.report [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleted allocations for instance dad53420-37f1-42ef-b0d3-e35c73b97417 [ 988.428128] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Successfully updated port: aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.841176] env[61545]: DEBUG nova.compute.utils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 988.845464] env[61545]: DEBUG nova.compute.manager [None 
req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 988.845632] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 988.876805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d3d25e4a-0bf3-4ebd-9dfd-d2a129960f31 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "dad53420-37f1-42ef-b0d3-e35c73b97417" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.050s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.895955] env[61545]: DEBUG nova.policy [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aad2b7980c3f47249beeaeb719cd9399', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1b012a7a97a46309725d337c78910b5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 988.931179] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.931179] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.931179] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 989.000325] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.000605] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.000728] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.000895] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.001163] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.001360] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.002113] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.002113] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.003966] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.003966] env[61545]: DEBUG nova.virt.hardware 
[None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.003966] env[61545]: DEBUG nova.virt.hardware [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.006662] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04e5297-4f80-4a0f-a296-1d6a726d18da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.017297] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 989.018336] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96e6a8e-b737-4f18-bd16-fe16f5f73d58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.027436] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622dacef-5d22-4534-a930-b2b64050cb5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.038705] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 989.038705] env[61545]: ERROR oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk due to incomplete transfer. [ 989.038705] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2de72947-ad4c-48f2-8247-7a3dad7f3376 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.055381] env[61545]: DEBUG oslo_vmware.rw_handles [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5254a8be-a643-86b3-f49a-ad495c240001/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 989.055640] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Uploaded image 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 989.057611] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 989.058249] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-02ab37e1-e46a-423f-8d7c-0b7797c7da7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.066580] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 989.066580] env[61545]: value = "task-4256175" [ 989.066580] env[61545]: _type = "Task" [ 989.066580] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.083724] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256175, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.276854] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a814b4c-a57d-4b0a-9455-5aa82b1050d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.285381] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f01d907-e9f5-45e0-845c-a33fa54142e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.318824] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f581e0-d4f5-4631-978a-83c5ed6f6082 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.329025] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b43046-68fa-4cb6-9e90-bb100d0b5dea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.343375] env[61545]: DEBUG nova.compute.provider_tree [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.346216] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 989.367506] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Successfully created port: 039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 989.510408] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.531023] env[61545]: DEBUG nova.compute.manager [req-305d8e66-34bd-4a4f-9c9c-5c5bdb56f6f9 req-307181b6-fd6f-4d0a-8315-1358af7c8329 service nova] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Received event network-vif-deleted-52cec3f8-5316-4f38-86e3-82087b8e5fac {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 989.581059] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256175, 'name': Destroy_Task} progress is 33%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.626179] env[61545]: INFO nova.compute.manager [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Rebuilding instance [ 989.686583] env[61545]: DEBUG nova.compute.manager [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.687608] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a6cb4b-d105-47d6-afeb-c8184e35bd25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.726581] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 989.727993] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad21a72f-4d67-47af-977d-d8f9db544e9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.735923] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 989.736020] env[61545]: ERROR oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk due to incomplete transfer. [ 989.736283] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-241d1260-ac4d-4195-911c-b939d55c2b7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.744837] env[61545]: DEBUG oslo_vmware.rw_handles [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d31e0b-6f21-f540-9296-a123bcd86d3e/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 989.745060] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Uploaded image 11264cae-fabe-40dd-a3f5-f9c69800f45c to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 989.748118] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 989.748412] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6ddc3370-e6b7-4943-b8b7-ce1f2327445f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.758314] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 989.758314] env[61545]: value = "task-4256176" [ 989.758314] env[61545]: _type = "Task" [ 989.758314] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.769143] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256176, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.847681] env[61545]: DEBUG nova.scheduler.client.report [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.991286] env[61545]: DEBUG nova.network.neutron [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [{"id": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "address": "fa:16:3e:3d:9f:82", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaacdaf9b-95", "ovs_interfaceid": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.078774] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256175, 'name': Destroy_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.271243] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256176, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.336141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.336141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.336141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.336141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.336141] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.338996] env[61545]: INFO nova.compute.manager [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Terminating instance [ 990.362292] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 990.368948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.376659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.134s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.379828] env[61545]: INFO nova.compute.claims [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.421838] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.422858] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.422858] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.423082] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.423343] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 990.423596] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.424054] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.424192] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.424472] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.425562] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.425562] env[61545]: DEBUG nova.virt.hardware [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.426484] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856c5999-0636-4183-89c7-5dd4672634ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.431595] env[61545]: INFO nova.scheduler.client.report [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleted allocations for instance 8ab168cb-b0a9-403c-bdb5-b96c6d319baf [ 990.443315] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bbba76-33af-4532-8978-e9f4c82547f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.498063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.498472] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 
tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Instance network_info: |[{"id": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "address": "fa:16:3e:3d:9f:82", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaacdaf9b-95", "ovs_interfaceid": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 990.498905] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:9f:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aacdaf9b-9518-4298-a1df-ce1c3931e8e2', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.507535] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating folder: Project (da39b1ee6df640b89a9dab58e3380397). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 990.508205] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd87be32-80ad-4ced-b3ce-23fe0f71ad86 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.520525] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created folder: Project (da39b1ee6df640b89a9dab58e3380397) in parent group-v838542. [ 990.520726] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating folder: Instances. Parent ref: group-v838744. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 990.521011] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e17db82e-a58b-4418-ab74-16fdc3a0858a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.534593] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created folder: Instances in parent group-v838744. [ 990.534806] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.535875] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.535875] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-295bb188-a7fc-4e34-8c6c-94f19fa0f7a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.556439] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.556439] env[61545]: value = "task-4256179" [ 990.556439] env[61545]: _type = "Task" [ 990.556439] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.565021] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256179, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.578437] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256175, 'name': Destroy_Task, 'duration_secs': 1.150505} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.578765] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Destroyed the VM [ 990.579024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 990.579291] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-090a6c49-5664-469f-ab9a-e33e5637c3ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.587877] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 990.587877] env[61545]: value = "task-4256180" [ 990.587877] env[61545]: _type = "Task" [ 990.587877] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.599435] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256180, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.708485] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.708940] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcd6ddc5-9c93-497b-9708-bcce812c156d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.717743] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 990.717743] env[61545]: value = "task-4256181" [ 990.717743] env[61545]: _type = "Task" [ 990.717743] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.733669] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.771062] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256176, 'name': Destroy_Task, 'duration_secs': 0.931354} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.771309] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroyed the VM [ 990.771576] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 990.771807] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3de16485-d5ca-4bc5-b8d3-c5fd609f047d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.780441] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 990.780441] env[61545]: value = "task-4256182" [ 990.780441] env[61545]: _type = "Task" [ 990.780441] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.793624] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256182, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.802681] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "fff833ad-55af-4702-859b-05f94cac18c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.802996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.803431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "fff833ad-55af-4702-859b-05f94cac18c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.803580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.803813] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.808246] env[61545]: INFO nova.compute.manager [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Terminating instance [ 990.849081] env[61545]: DEBUG nova.compute.manager [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 990.849647] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.850188] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9b38fcc-fd4e-4b51-aa8a-73f6f98f8f60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.862367] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 990.862367] env[61545]: value = "task-4256183" [ 990.862367] env[61545]: _type = "Task" [ 990.862367] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.876387] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.949798] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d68f5bd-c985-4ad7-8f13-b96f147887f1 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "8ab168cb-b0a9-403c-bdb5-b96c6d319baf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.895s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.068906] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256179, 'name': CreateVM_Task, 'duration_secs': 0.40776} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.069321] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 991.069891] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.070081] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.070425] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 991.070713] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a41cd4-ec61-49f8-8a59-9459bfcdcf72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.076879] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 991.076879] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524fe7d9-ced8-f63c-f5d2-d819796a36e2" [ 991.076879] env[61545]: _type = "Task" [ 991.076879] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.088846] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524fe7d9-ced8-f63c-f5d2-d819796a36e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.100780] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256180, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.176199] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.176463] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.228924] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256181, 'name': PowerOffVM_Task, 'duration_secs': 0.254418} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.229315] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.229553] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.230360] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94326aaa-68d7-4d52-b21d-bfc690a8f0d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.238787] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.238787] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-649c1178-bbb4-466c-9a19-97b505258cfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.291049] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256182, 'name': RemoveSnapshot_Task} progress is 61%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.300851] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.301101] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.301272] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.301555] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f82b839-36fa-4668-8755-fba74744fdf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.309062] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 991.309062] env[61545]: value = "task-4256185" [ 991.309062] env[61545]: _type = "Task" [ 991.309062] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.319255] env[61545]: DEBUG nova.compute.manager [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 991.319493] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.319797] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.320565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbeba947-ed53-403b-bf83-4b2be810df24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.329009] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.329300] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b84da38-ade4-478d-8b27-cc5283c92545 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.337226] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 991.337226] env[61545]: value = "task-4256186" [ 991.337226] env[61545]: _type = "Task" [ 991.337226] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.347024] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256186, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.373499] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256183, 'name': PowerOffVM_Task, 'duration_secs': 0.257047} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.373811] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.374055] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 991.374262] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838740', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'name': 'volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5ba53915-ab57-493e-b2e1-7f3d1b3845ee', 'attached_at': '', 'detached_at': '', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'serial': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 991.375138] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6fb7c3-5090-4033-94ca-f4d6847e4159 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.406323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c70799-1bdf-4529-a649-0d320e5cc611 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.414749] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b77102-f1a8-47a8-a32c-41268a0bd045 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.441615] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f1614e-c479-42ce-9247-191d2e30030d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.466241] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] The volume has not been displaced from its original location: [datastore1] volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9/volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 991.472338] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.475606] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fea4ab-d90c-431b-b924-79704e0c4c4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.497618] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 991.497618] env[61545]: value = "task-4256187" [ 991.497618] env[61545]: _type = "Task" [ 991.497618] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.512916] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.592725] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524fe7d9-ced8-f63c-f5d2-d819796a36e2, 'name': SearchDatastore_Task, 'duration_secs': 0.012334} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.598300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.598607] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.598874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.599047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.599250] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.606726] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df6ba47d-2330-4a00-be21-ad6e179e6c1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.610859] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.611307] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.624058] env[61545]: DEBUG oslo_vmware.api [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256180, 'name': RemoveSnapshot_Task, 
'duration_secs': 0.962176} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.627208] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.627563] env[61545]: INFO nova.compute.manager [None req-86f8f970-d349-4e08-933b-8b053a9c6f7c tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Took 17.81 seconds to snapshot the instance on the hypervisor. [ 991.630456] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.630892] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.632409] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3b0370-9058-41b0-aac6-2c3a757385a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.645344] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 991.645344] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e72f99-efef-cf79-bc80-5bbd125dc802" [ 991.645344] env[61545]: _type = "Task" [ 991.645344] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.665332] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e72f99-efef-cf79-bc80-5bbd125dc802, 'name': SearchDatastore_Task, 'duration_secs': 0.011835} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.666965] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81748f88-97a0-4bda-b5d1-e12982005f81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.675801] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 991.675801] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251e4ad-ef33-7d17-d72e-62efd92bbfdc" [ 991.675801] env[61545]: _type = "Task" [ 991.675801] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.684787] env[61545]: DEBUG nova.compute.utils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 991.693818] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251e4ad-ef33-7d17-d72e-62efd92bbfdc, 'name': SearchDatastore_Task, 'duration_secs': 0.011957} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.694125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.694676] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d0f42893-3332-4027-93df-bb46e3350485/d0f42893-3332-4027-93df-bb46e3350485.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.694676] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26f6660a-abe9-4d02-9d94-105946791f2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.702780] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 991.702780] env[61545]: value = "task-4256188" [ 991.702780] env[61545]: _type = "Task" [ 991.702780] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.715457] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.794512] env[61545]: DEBUG oslo_vmware.api [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256182, 'name': RemoveSnapshot_Task, 'duration_secs': 0.878513} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.794870] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.795165] env[61545]: INFO nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 19.04 seconds to snapshot the instance on the hypervisor. [ 991.824261] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261285} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.824548] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.824731] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.824923] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.847477] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256186, 'name': PowerOffVM_Task, 'duration_secs': 0.240535} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.851063] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.851258] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.851815] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0d62c4e-2153-4afc-910a-797bb5acefdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.942978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.943404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.943726] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleting the datastore file [datastore2] fff833ad-55af-4702-859b-05f94cac18c8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.944141] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a74b31f-fc75-4d16-be9c-80a18c2df64c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.953599] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 991.953599] env[61545]: value = "task-4256190" [ 991.953599] env[61545]: _type = "Task" [ 991.953599] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.966631] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256190, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.972296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3faab1-e697-4987-84c3-70d16025ad51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.981574] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2a0ea7-9858-477b-9048-fe9c7eb593be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.021709] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9855598c-bffc-4c8d-aca1-4b9f8849f659 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.031220] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256187, 'name': ReconfigVM_Task, 'duration_secs': 0.235367} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.033893] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.041115] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce789601-8b60-445d-ad56-26cb45952737 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.054340] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b3faf3-8ded-4a37-b6a5-d126a86a3458 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.071998] env[61545]: DEBUG nova.compute.provider_tree [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.075339] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 992.075339] env[61545]: value = "task-4256191" [ 992.075339] env[61545]: _type = "Task" [ 992.075339] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.089116] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.116234] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 992.161959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.161959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.161959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "eced4107-b99e-479e-b22c-2157320ecf95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.161959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.161959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.162322] env[61545]: INFO nova.compute.manager [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Terminating instance [ 992.180636] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Successfully updated port: 039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.194834] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 
tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.018s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.221968] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256188, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.361345] env[61545]: DEBUG nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Found 3 images (rotation: 2) {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 992.361566] env[61545]: DEBUG nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Rotating out 1 backups {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 992.361734] env[61545]: DEBUG nova.compute.manager [None req-39a8e4c3-e693-48ea-853e-9c3ec83b5c8e tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleting image 2416e430-378b-4a56-8d95-29053d0d652b {{(pid=61545) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 992.467846] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.577527] env[61545]: DEBUG nova.scheduler.client.report [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.590226] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256191, 'name': ReconfigVM_Task, 'duration_secs': 0.34711} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.590522] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838740', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'name': 'volume-9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5ba53915-ab57-493e-b2e1-7f3d1b3845ee', 'attached_at': '', 'detached_at': '', 'volume_id': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9', 'serial': '9a1dda2e-c1d4-43e7-be45-813fb72b01c9'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 992.590835] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.591635] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2ca675-159c-4137-b0fb-201ea60d46bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.600359] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.600517] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3c8cb3b-d044-4e8a-86d4-765cad6587da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.651270] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.666559] env[61545]: DEBUG nova.compute.manager [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 992.666809] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.667792] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185e17c2-cee4-4dfd-ab4e-60ffab8ce115 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.676670] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.676972] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beb46159-b7ad-452c-82f9-c6eedecd11d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.684564] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.684882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquired lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.684966] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.688100] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 992.688100] env[61545]: value = "task-4256193" [ 992.688100] env[61545]: _type = "Task" [ 992.688100] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.701111] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256193, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.716216] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256188, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618085} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.716454] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] d0f42893-3332-4027-93df-bb46e3350485/d0f42893-3332-4027-93df-bb46e3350485.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.716663] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.716940] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc959373-644e-4d11-ade4-a797e58b5157 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.727738] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 992.727738] env[61545]: value = "task-4256194" [ 992.727738] env[61545]: _type = "Task" [ 992.727738] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.740725] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256194, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.755124] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.755124] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.755124] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] 5ba53915-ab57-493e-b2e1-7f3d1b3845ee {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.755662] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7163d2c-c796-409e-bc9d-a56758a5f9fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.764389] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 992.764389] env[61545]: value = "task-4256195" [ 992.764389] env[61545]: _type = "Task" [ 992.764389] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.778815] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256195, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.882097] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.882759] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.882759] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.883048] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.883048] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.883186] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.883367] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.883527] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.883689] env[61545]: DEBUG nova.virt.hardware [None 
req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.883855] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.884046] env[61545]: DEBUG nova.virt.hardware [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.884980] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cc2aed-6c29-4c1d-993a-187fc981caac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.896573] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cce2c9-0d3c-4a7b-ab16-522a36335095 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.918860] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:ab:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd289bc06-c77a-460a-b15d-e94dcfb3ff53', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.926910] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.927271] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.927655] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3bfe841-a49c-4466-8e27-68fc2271ad84 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.954363] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.954363] env[61545]: value = "task-4256196" [ 992.954363] env[61545]: _type = "Task" [ 992.954363] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.967031] env[61545]: DEBUG nova.compute.manager [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received event network-vif-plugged-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 992.967031] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Acquiring lock "d0f42893-3332-4027-93df-bb46e3350485-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.967252] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Lock "d0f42893-3332-4027-93df-bb46e3350485-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.967562] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Lock "d0f42893-3332-4027-93df-bb46e3350485-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.967620] env[61545]: DEBUG nova.compute.manager [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] No waiting events found dispatching network-vif-plugged-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 992.967804] env[61545]: WARNING nova.compute.manager [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received unexpected event network-vif-plugged-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 for instance with vm_state building and task_state spawning. [ 992.967996] env[61545]: DEBUG nova.compute.manager [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 992.968213] env[61545]: DEBUG nova.compute.manager [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing instance network info cache due to event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 992.968462] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Acquiring lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.968662] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Acquired lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.968914] env[61545]: DEBUG nova.network.neutron [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.979360] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256196, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.979360] env[61545]: DEBUG oslo_vmware.api [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.739828} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.979360] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.979525] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.979696] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.979936] env[61545]: INFO nova.compute.manager [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Took 1.66 seconds to destroy the instance on the hypervisor. [ 992.980228] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.980747] env[61545]: DEBUG nova.compute.manager [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.980851] env[61545]: DEBUG nova.network.neutron [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.086769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.087266] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.093670] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.544s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.094327] env[61545]: DEBUG nova.objects.instance [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lazy-loading 'resources' on Instance uuid 62301196-fb8a-45fe-9193-0ad8f7126ab5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.202655] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256193, 'name': PowerOffVM_Task, 'duration_secs': 0.224142} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.204148] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.204148] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.204148] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9eecc4b-7676-44a5-a88b-2dca1789d999 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.254028] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110328} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.254028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.254028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b71da2b-c42b-4490-9e03-4b30f959dfb5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.284452] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] d0f42893-3332-4027-93df-bb46e3350485/d0f42893-3332-4027-93df-bb46e3350485.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.295511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.295511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.295511] 
env[61545]: INFO nova.compute.manager [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Attaching volume 705a0f51-10e1-4167-b382-baf0f7935774 to /dev/sdb [ 993.295511] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bfc21cb-6837-4545-8bcd-92c88ceec9e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.314636] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.315209] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.315496] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleting the datastore file [datastore2] eced4107-b99e-479e-b22c-2157320ecf95 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.317033] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b28ab54-d73b-41eb-8ec2-295b943d0337 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.336417] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.339541] env[61545]: DEBUG oslo_vmware.api [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310006} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.343090] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.343438] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.343742] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.344016] env[61545]: INFO nova.compute.manager [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Took 2.49 seconds to destroy the instance on the hypervisor. [ 993.344345] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.345160] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 993.345160] env[61545]: value = "task-4256198" [ 993.345160] env[61545]: _type = "Task" [ 993.345160] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.345554] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for the task: (returnval){ [ 993.345554] env[61545]: value = "task-4256199" [ 993.345554] env[61545]: _type = "Task" [ 993.345554] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.345897] env[61545]: DEBUG nova.compute.manager [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.346064] env[61545]: DEBUG nova.network.neutron [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.362252] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.368778] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.383321] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a78177-ed01-40ac-9365-31793d3bd393 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.391599] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9089d3b8-443b-4378-8b0a-c29741cda3bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.412422] env[61545]: DEBUG nova.virt.block_device [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating existing volume attachment record: b7906c29-be2f-4fb4-aee4-41177936a3fd {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 993.468068] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256196, 'name': CreateVM_Task, 'duration_secs': 0.384888} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.468307] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.469065] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.469158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.469465] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 993.469728] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c0c5254-62a9-4e14-93e9-8d83558a34e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.476168] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 
tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 993.476168] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52498bc9-4030-0c18-26e0-4725ece9d625" [ 993.476168] env[61545]: _type = "Task" [ 993.476168] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.487963] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52498bc9-4030-0c18-26e0-4725ece9d625, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.519435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "855904d8-7eb3-405d-9236-ab4ba9b33940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.519598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.605607] env[61545]: DEBUG nova.compute.utils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.615028] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 993.615028] env[61545]: DEBUG nova.network.neutron [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 993.803920] env[61545]: DEBUG nova.policy [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b9cf474f2eb4103af0f351bc6d3c6e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc2e8ab1dea400ca086a3039117cbff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 993.865968] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256198, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.870452] env[61545]: DEBUG oslo_vmware.api [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Task: {'id': task-4256199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.334809} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.873302] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.873302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.873302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.873302] env[61545]: INFO nova.compute.manager [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 993.873302] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.876126] env[61545]: DEBUG nova.compute.manager [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.876238] env[61545]: DEBUG nova.network.neutron [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.882506] env[61545]: DEBUG nova.network.neutron [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updated VIF entry in instance network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.882875] env[61545]: DEBUG nova.network.neutron [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [{"id": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "address": "fa:16:3e:3d:9f:82", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaacdaf9b-95", "ovs_interfaceid": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.982132] env[61545]: DEBUG nova.network.neutron [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Updating instance_info_cache with network_info: [{"id": "039bac9c-919b-4727-8313-ea1206afb5ec", "address": "fa:16:3e:b8:41:a5", "network": {"id": "25c1c752-e1ef-4183-800c-7dac72302800", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-484354822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b012a7a97a46309725d337c78910b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap039bac9c-91", "ovs_interfaceid": "039bac9c-919b-4727-8313-ea1206afb5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.994379] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52498bc9-4030-0c18-26e0-4725ece9d625, 'name': SearchDatastore_Task, 'duration_secs': 0.015819} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.994805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.995083] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.995394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.995583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.995786] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.996746] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94b053f1-a2c6-424e-b10e-8c478a4c3b19 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.007632] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.007897] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.008847] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52d42e4d-6030-46bd-9a5b-329f998f29e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.020957] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 994.020957] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ac970-107c-22ce-4eb3-1aa0d408fbaf" [ 994.020957] env[61545]: _type = "Task" [ 994.020957] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.021427] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 994.041501] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ac970-107c-22ce-4eb3-1aa0d408fbaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.119025] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.122996] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7731ae15-025e-491e-95e9-0c7d805d9109 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.133246] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eb54cc-692f-4306-a0e7-e8c8e6752b3c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.170757] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64ba9fa-be7c-494e-b37f-5563f190c683 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.181212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e35b48-ed40-4d88-930a-fb03cc4984b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.204262] env[61545]: DEBUG nova.compute.provider_tree [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.359693] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256198, 'name': ReconfigVM_Task, 'duration_secs': 0.893375} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.359790] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Reconfigured VM instance instance-0000004a to attach disk [datastore2] d0f42893-3332-4027-93df-bb46e3350485/d0f42893-3332-4027-93df-bb46e3350485.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.360541] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-203b2ce7-8922-46db-9a24-542bae6bb28d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.369394] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 994.369394] env[61545]: value = "task-4256203" [ 994.369394] env[61545]: _type = "Task" [ 994.369394] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.384938] env[61545]: DEBUG nova.network.neutron [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.386647] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256203, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.387351] env[61545]: DEBUG oslo_concurrency.lockutils [req-f3f20b63-869f-4c09-9416-ecae1edc3c8f req-e81294cd-e23c-4233-9fda-47831c5a7547 service nova] Releasing lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.488533] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Releasing lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.488879] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Instance network_info: |[{"id": "039bac9c-919b-4727-8313-ea1206afb5ec", "address": "fa:16:3e:b8:41:a5", "network": {"id": "25c1c752-e1ef-4183-800c-7dac72302800", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-484354822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b012a7a97a46309725d337c78910b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap039bac9c-91", "ovs_interfaceid": "039bac9c-919b-4727-8313-ea1206afb5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.489348] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:41:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '039bac9c-919b-4727-8313-ea1206afb5ec', 'vif_model': 'vmxnet3'}] 
{{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.498379] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Creating folder: Project (b1b012a7a97a46309725d337c78910b5). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 994.498722] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24f77f04-5fa9-4606-8635-e327ca151d61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.515903] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Created folder: Project (b1b012a7a97a46309725d337c78910b5) in parent group-v838542. [ 994.515903] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Creating folder: Instances. Parent ref: group-v838750. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 994.515903] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-129fb6fd-3813-4283-b68f-0a9feb4ae8cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.527233] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Created folder: Instances in parent group-v838750. [ 994.527523] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 994.528219] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.528802] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbb7cf9b-5893-4fe0-8756-3f83a0a0d79a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.553259] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ac970-107c-22ce-4eb3-1aa0d408fbaf, 'name': SearchDatastore_Task, 'duration_secs': 0.047225} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.559821] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b924e3a8-4fa5-4701-a108-79d6718ea8a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.565384] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.565384] env[61545]: value = "task-4256206" [ 994.565384] env[61545]: _type = "Task" [ 994.565384] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.574102] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 994.574102] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277f19f-5856-e69e-1d68-aa0f9dad1221" [ 994.574102] env[61545]: _type = "Task" [ 994.574102] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.578513] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.583123] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256206, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.588184] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277f19f-5856-e69e-1d68-aa0f9dad1221, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.629533] env[61545]: INFO nova.virt.block_device [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Booting with volume 11641b07-7823-42c5-8e71-d45453cc6704 at /dev/sda [ 994.689787] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80271e89-0e44-468f-aa82-542d26530bcd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.701358] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed36e297-8794-40eb-8b08-8006110e6aaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.714109] env[61545]: DEBUG nova.scheduler.client.report [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.747355] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e52ae11-7dea-4c9d-897d-063679973c15 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.757055] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95d83fd-2b87-424c-98ae-7a2b3777e79d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.799640] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcffba1e-a83e-4255-bd07-f2a135bc3a02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.807034] env[61545]: DEBUG nova.network.neutron [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.809252] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bbe539-7ec7-4c04-b995-78690a822d82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.824948] env[61545]: DEBUG nova.virt.block_device [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating existing volume attachment record: 7b0e596c-ba2d-488b-97ae-68450ccd7181 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 994.871405] env[61545]: DEBUG nova.network.neutron [None 
req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Successfully created port: fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 994.883363] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256203, 'name': Rename_Task, 'duration_secs': 0.174212} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.883896] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.884518] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cb8fe20-8400-4924-9e10-282895c3f19c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.887834] env[61545]: INFO nova.compute.manager [-] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Took 1.91 seconds to deallocate network for instance. [ 994.896232] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 994.896232] env[61545]: value = "task-4256207" [ 994.896232] env[61545]: _type = "Task" [ 994.896232] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.907541] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.078209] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256206, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.089292] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277f19f-5856-e69e-1d68-aa0f9dad1221, 'name': SearchDatastore_Task, 'duration_secs': 0.026511} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.089639] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.090127] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.090248] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8362223-d5c0-4323-a4b2-0800db842bc8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.099565] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 995.099565] env[61545]: value = "task-4256208" [ 995.099565] env[61545]: _type = "Task" [ 995.099565] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.109459] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.180547] env[61545]: DEBUG nova.network.neutron [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.223008] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.226495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.576s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.231880] env[61545]: INFO nova.compute.claims [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.250031] env[61545]: INFO nova.scheduler.client.report [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Deleted allocations for instance 62301196-fb8a-45fe-9193-0ad8f7126ab5 [ 995.314781] env[61545]: INFO nova.compute.manager [-] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Took 1.97 seconds to deallocate network for instance. [ 995.399346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.413223] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256207, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.472390] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.475564] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.002s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.578427] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256206, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.615361] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256208, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.686023] env[61545]: INFO nova.compute.manager [-] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Took 1.81 seconds to deallocate network for instance. [ 995.762162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e4c8549e-bc58-4c6b-84fa-59bc188141f5 tempest-ListServerFiltersTestJSON-816069453 tempest-ListServerFiltersTestJSON-816069453-project-member] Lock "62301196-fb8a-45fe-9193-0ad8f7126ab5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.681s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.865772] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Received event network-vif-plugged-039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 995.866021] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] Acquiring lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.866173] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.866420] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service 
nova] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.866659] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] No waiting events found dispatching network-vif-plugged-039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 995.866914] env[61545]: WARNING nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Received unexpected event network-vif-plugged-039bac9c-919b-4727-8313-ea1206afb5ec for instance with vm_state building and task_state spawning. [ 995.867160] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Received event network-changed-039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 995.867454] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Refreshing instance network info cache due to event network-changed-039bac9c-919b-4727-8313-ea1206afb5ec. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 995.867663] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] Acquiring lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.867889] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] Acquired lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.868118] env[61545]: DEBUG nova.network.neutron [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Refreshing network info cache for port 039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.910793] env[61545]: DEBUG oslo_vmware.api [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256207, 'name': PowerOnVM_Task, 'duration_secs': 0.576913} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.911071] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.911280] env[61545]: INFO nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Took 8.06 seconds to spawn the instance on the hypervisor. [ 995.911462] env[61545]: DEBUG nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.912261] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb32d5c-68a3-493f-a8ee-19a9a2c6c45c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.980260] env[61545]: DEBUG nova.compute.utils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.008160] env[61545]: INFO nova.compute.manager [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Took 0.69 seconds to detach 1 volumes for instance. [ 996.076855] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256206, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.118860] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5807} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.119146] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.119548] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.121133] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37ab83c7-0329-4961-972b-a4bc50754564 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.128583] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 996.128583] env[61545]: value = "task-4256210" [ 996.128583] env[61545]: _type = "Task" [ 996.128583] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.140161] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.193294] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.435483] env[61545]: INFO nova.compute.manager [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Took 26.67 seconds to build instance. 
[ 996.484131] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.515748] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.576735] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256206, 'name': CreateVM_Task, 'duration_secs': 1.589164} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.576997] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.577741] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.578955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.578955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 996.578955] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a70985-83a8-4fb2-b5de-681dcb801cf7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.591629] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 996.591629] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525985e6-e486-86ef-10a7-b62ef7ab2495" [ 996.591629] env[61545]: _type = "Task" [ 996.591629] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.602722] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525985e6-e486-86ef-10a7-b62ef7ab2495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.641845] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170958} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.642032] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.643457] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cae6c8-46d1-4774-a4aa-b0ec746901a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.661756] env[61545]: DEBUG nova.compute.manager [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.671866] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.678374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e0689e-f56a-4c8d-a26d-d26031483cf2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.681619] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-090dd72b-1743-47e7-ab1f-7a2e981d5ea7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.705354] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 996.705354] env[61545]: value = "task-4256211" [ 996.705354] env[61545]: _type = "Task" [ 996.705354] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.717882] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256211, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.787677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84e6d0d-e48a-4d55-b1ad-3b2b84d3d52d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.796349] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f1850b-8561-4d71-a672-e5a5151c2804 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.831980] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dd7948-3530-4b45-84d9-0a5b5ea15198 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.841518] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6530bf2a-3e35-4e32-9b51-ad77b6055f63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.857021] env[61545]: DEBUG nova.compute.provider_tree [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.859198] env[61545]: DEBUG nova.network.neutron [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Updated VIF entry in instance network info cache for port 039bac9c-919b-4727-8313-ea1206afb5ec. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.859571] env[61545]: DEBUG nova.network.neutron [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Updating instance_info_cache with network_info: [{"id": "039bac9c-919b-4727-8313-ea1206afb5ec", "address": "fa:16:3e:b8:41:a5", "network": {"id": "25c1c752-e1ef-4183-800c-7dac72302800", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-484354822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b012a7a97a46309725d337c78910b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap039bac9c-91", "ovs_interfaceid": "039bac9c-919b-4727-8313-ea1206afb5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.922873] env[61545]: DEBUG nova.network.neutron [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Successfully updated port: fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.943822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5794af99-0716-4a1b-8907-d79f59d6124c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.189s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.050297] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 997.051095] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.051163] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.051280] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.051477] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.051693] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.051928] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.052219] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.052385] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.052597] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] 
Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.052786] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.053957] env[61545]: DEBUG nova.virt.hardware [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.054799] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce511693-6f3a-413b-b136-6101d9e33041 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.064600] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574efcf1-35ac-49a6-8c73-f37a4c57cb3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.100504] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525985e6-e486-86ef-10a7-b62ef7ab2495, 'name': SearchDatastore_Task, 'duration_secs': 0.011503} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.101616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.101831] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.102149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.102342] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} 
[ 997.102535] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.103311] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e765cdb3-94ad-4156-89cd-0f3143f90e25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.114267] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.114490] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.115606] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64ea698-6768-4af8-9f2d-ea36383242f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.122690] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 997.122690] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525db9da-ca40-f357-de19-1341fada95fb" [ 997.122690] env[61545]: _type = "Task" [ 997.122690] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.131107] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525db9da-ca40-f357-de19-1341fada95fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.208401] env[61545]: INFO nova.compute.manager [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] instance snapshotting [ 997.216606] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5eac64e-4805-4f25-b57d-1a45f90c1392 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.227538] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256211, 'name': ReconfigVM_Task, 'duration_secs': 0.290431} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.241742] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.242248] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75e31652-1f48-4e02-8f27-cddf6623fbc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.248036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80288906-f914-4479-9d42-fb57f7ff8642 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.256197] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 997.256197] env[61545]: value = "task-4256212" [ 997.256197] env[61545]: _type = "Task" [ 997.256197] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.267826] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256212, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.363125] env[61545]: DEBUG nova.scheduler.client.report [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.368697] env[61545]: DEBUG oslo_concurrency.lockutils [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] Releasing lock "refresh_cache-fed2c050-74e7-48f1-8a19-7c58e26d2159" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.369386] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Received event network-vif-deleted-28a12f95-4a10-42db-ac3a-4fe609682144 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 997.370300] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Received event network-vif-deleted-dfbfdd5b-df4f-4326-b48a-69bd14494d5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 997.370300] env[61545]: DEBUG nova.compute.manager [req-dcd37675-529a-4d1a-b2e0-d8839b0edbcd req-009e418b-23a6-49ac-bab7-8ffebd7e5e76 service nova] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Received event network-vif-deleted-d33a6a2d-6310-4263-adf4-dcf09ce72a6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 997.425788] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.425952] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquired lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.426119] env[61545]: DEBUG nova.network.neutron [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.588283] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 
tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.588822] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.589179] env[61545]: INFO nova.compute.manager [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Attaching volume dc427f98-25af-4605-aa76-6df488552e30 to /dev/sdb [ 997.633043] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff37b9-5ac6-4db0-ab17-474369752bf5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.642900] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525db9da-ca40-f357-de19-1341fada95fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014392} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.645862] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f3fa29-8c3d-4322-be16-59ea531c3dfc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.657029] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec75aac-3834-42ac-b795-8247fd360eb3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.661031] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 997.661031] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f12c4-4570-43bc-a240-7907c1a2db1f" [ 997.661031] env[61545]: _type = "Task" [ 997.661031] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.669562] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f12c4-4570-43bc-a240-7907c1a2db1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.674142] env[61545]: DEBUG nova.virt.block_device [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating existing volume attachment record: 775931a4-27bd-4d0c-b0c5-9a2dcdfc2eac {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 997.758869] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 997.759123] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f2a29bc8-3acf-43d0-9acf-3775bd502334 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.772404] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256212, 'name': Rename_Task, 'duration_secs': 0.150778} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.775722] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.776033] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 997.776033] env[61545]: value = "task-4256213" [ 997.776033] env[61545]: _type = "Task" [ 997.776033] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.776320] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c441ff6-ab3f-4ddd-ae78-e383ac6c4d71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.789702] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256213, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.791643] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 997.791643] env[61545]: value = "task-4256214" [ 997.791643] env[61545]: _type = "Task" [ 997.791643] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.801731] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.874048] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.874772] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 997.877367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.299s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.879810] env[61545]: INFO nova.compute.claims [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.005804] env[61545]: DEBUG nova.network.neutron [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.173979] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523f12c4-4570-43bc-a240-7907c1a2db1f, 'name': SearchDatastore_Task, 'duration_secs': 0.012576} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.174149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.174654] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fed2c050-74e7-48f1-8a19-7c58e26d2159/fed2c050-74e7-48f1-8a19-7c58e26d2159.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.174803] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c37cd726-9827-4ad0-ae71-dfaf2e9f7275 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.196808] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 998.196808] env[61545]: value = "task-4256217" [ 998.196808] env[61545]: _type = "Task" [ 998.196808] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.201092] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.293518] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256213, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.309960] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256214, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.385378] env[61545]: DEBUG nova.compute.utils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 998.397359] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 998.397637] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 998.404482] env[61545]: DEBUG nova.compute.manager [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Received event network-vif-plugged-fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 998.404629] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Acquiring lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.404822] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.404986] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.405219] env[61545]: DEBUG nova.compute.manager [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] No waiting events found dispatching network-vif-plugged-fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.405441] env[61545]: WARNING nova.compute.manager [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Received unexpected event network-vif-plugged-fab93887-61ce-41f0-a531-d540740b5acd for instance with vm_state building and task_state spawning. [ 998.405675] env[61545]: DEBUG nova.compute.manager [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Received event network-changed-fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 998.405930] env[61545]: DEBUG nova.compute.manager [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Refreshing instance network info cache due to event network-changed-fab93887-61ce-41f0-a531-d540740b5acd. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 998.406020] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Acquiring lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.410917] env[61545]: DEBUG nova.network.neutron [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating instance_info_cache with network_info: [{"id": "fab93887-61ce-41f0-a531-d540740b5acd", "address": "fa:16:3e:3c:af:43", "network": {"id": "32d6e817-cd7d-4b1a-bbf6-470349a1b3aa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1302718488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc2e8ab1dea400ca086a3039117cbff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab93887-61", "ovs_interfaceid": "fab93887-61ce-41f0-a531-d540740b5acd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.475375] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 998.475375] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838749', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'name': 'volume-705a0f51-10e1-4167-b382-baf0f7935774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2bb4ea0-e9fb-4198-80fa-acfd25fb226d', 'attached_at': '', 'detached_at': '', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'serial': '705a0f51-10e1-4167-b382-baf0f7935774'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 998.475840] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a4addf-2a3d-4387-9e91-9beb5606f4bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.497719] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cee5193-1b16-4b75-a2e6-3e9ad4bb3f8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.526507] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] volume-705a0f51-10e1-4167-b382-baf0f7935774/volume-705a0f51-10e1-4167-b382-baf0f7935774.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.528549] env[61545]: DEBUG nova.policy [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113fc58985704b0b9e0a28be2f61cd68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9007a6e389c0467c8e2077309984eaab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 998.531224] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bc0f54c-0a71-4b02-91f5-07b68754d70d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.561616] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 998.561616] env[61545]: value = "task-4256219" [ 998.561616] env[61545]: _type = "Task" [ 998.561616] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.575481] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256219, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.703487] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256217, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.792404] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256213, 'name': CreateSnapshot_Task, 'duration_secs': 0.6443} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.792753] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 998.793687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa0d57c-bf34-404f-a975-373b95c33552 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.806660] env[61545]: DEBUG oslo_vmware.api [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256214, 'name': PowerOnVM_Task, 'duration_secs': 0.526915} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.813832] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.814487] env[61545]: DEBUG nova.compute.manager [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.818701] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ffedab-b463-4c9e-9237-0c880d9ccc06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.896860] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 998.914496] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Releasing lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.915089] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Instance network_info: |[{"id": "fab93887-61ce-41f0-a531-d540740b5acd", "address": "fa:16:3e:3c:af:43", "network": {"id": "32d6e817-cd7d-4b1a-bbf6-470349a1b3aa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1302718488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc2e8ab1dea400ca086a3039117cbff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab93887-61", "ovs_interfaceid": "fab93887-61ce-41f0-a531-d540740b5acd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 998.915425] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Acquired lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.915641] env[61545]: DEBUG nova.network.neutron [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Refreshing network info cache for port fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.916859] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:af:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fab93887-61ce-41f0-a531-d540740b5acd', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.925876] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 
tempest-ServersTestBootFromVolume-2047470289-project-member] Creating folder: Project (4bc2e8ab1dea400ca086a3039117cbff). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.930047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.931403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.931403] env[61545]: DEBUG nova.compute.manager [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.931403] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49cd969a-8c20-46e3-815f-64c22447639a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.934520] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a21ad5b-02b3-43ee-ad12-654f1774b7e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.945934] env[61545]: DEBUG nova.compute.manager [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 998.946374] env[61545]: DEBUG nova.objects.instance [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.955467] env[61545]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 998.955467] env[61545]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61545) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 998.955467] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Folder already exists: Project (4bc2e8ab1dea400ca086a3039117cbff). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 998.955467] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Creating folder: Instances. Parent ref: group-v838726. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.955863] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c699ff58-4af2-4620-8153-7d816687262b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.968877] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Created folder: Instances in parent group-v838726. [ 998.968877] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.971832] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.976861] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22d3975c-184d-495b-b079-85afc60617fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.006182] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.006182] env[61545]: value = "task-4256222" [ 999.006182] env[61545]: _type = "Task" [ 999.006182] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.018853] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256222, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.025017] env[61545]: DEBUG nova.compute.manager [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 999.025017] env[61545]: DEBUG nova.compute.manager [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing instance network info cache due to event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 999.025017] env[61545]: DEBUG oslo_concurrency.lockutils [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] Acquiring lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.025017] env[61545]: DEBUG oslo_concurrency.lockutils [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] Acquired lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.025017] env[61545]: DEBUG nova.network.neutron [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.068769] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256219, 'name': ReconfigVM_Task, 'duration_secs': 0.504498} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.069057] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfigured VM instance instance-0000003f to attach disk [datastore1] volume-705a0f51-10e1-4167-b382-baf0f7935774/volume-705a0f51-10e1-4167-b382-baf0f7935774.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.077758] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2c24e34-efab-49ce-a000-f68946c60400 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.102865] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 999.102865] env[61545]: value = "task-4256223" [ 999.102865] env[61545]: _type = "Task" [ 999.102865] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.120261] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256223, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.206268] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256217, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56514} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.214577] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fed2c050-74e7-48f1-8a19-7c58e26d2159/fed2c050-74e7-48f1-8a19-7c58e26d2159.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.214978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.219183] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Successfully created port: ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.219183] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35dd8435-7400-43e8-9a58-362ce994ea87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.241243] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 999.241243] env[61545]: value = "task-4256224" [ 999.241243] env[61545]: _type = "Task" [ 999.241243] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.253223] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.339521] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 999.340409] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b49c7af9-c4c7-4426-a68c-ccf91548999b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.354862] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 999.354862] env[61545]: value = "task-4256225" [ 999.354862] env[61545]: _type = "Task" [ 999.354862] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.359614] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.369607] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256225, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.481786] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc3c2fb-7538-498d-89cd-a90e2d222659 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.491551] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8b6e86-6fce-4587-868b-ee4d9a7986d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.539584] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5121278-4277-4828-8133-28bdc2b28faf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.553402] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256222, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.556899] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a525af-39a3-45f1-911b-c0e3f6f30a5d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.574892] env[61545]: DEBUG nova.compute.provider_tree [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.624132] env[61545]: DEBUG oslo_vmware.api [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256223, 'name': ReconfigVM_Task, 'duration_secs': 0.15741} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.624132] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838749', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'name': 'volume-705a0f51-10e1-4167-b382-baf0f7935774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2bb4ea0-e9fb-4198-80fa-acfd25fb226d', 'attached_at': '', 'detached_at': '', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'serial': '705a0f51-10e1-4167-b382-baf0f7935774'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 999.705380] env[61545]: DEBUG nova.network.neutron [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updated VIF entry in instance network info cache for port fab93887-61ce-41f0-a531-d540740b5acd. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.705813] env[61545]: DEBUG nova.network.neutron [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating instance_info_cache with network_info: [{"id": "fab93887-61ce-41f0-a531-d540740b5acd", "address": "fa:16:3e:3c:af:43", "network": {"id": "32d6e817-cd7d-4b1a-bbf6-470349a1b3aa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1302718488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc2e8ab1dea400ca086a3039117cbff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab93887-61", "ovs_interfaceid": "fab93887-61ce-41f0-a531-d540740b5acd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.746082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "56680678-c844-4dd2-8541-d50de83b22d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.746082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock 
"56680678-c844-4dd2-8541-d50de83b22d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.746259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "56680678-c844-4dd2-8541-d50de83b22d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.749338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "56680678-c844-4dd2-8541-d50de83b22d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.749338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "56680678-c844-4dd2-8541-d50de83b22d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.754567] env[61545]: INFO nova.compute.manager [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Terminating instance [ 999.763334] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075219} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.763689] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.764578] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16eaeddb-02d9-4a17-bcbc-4e258c5008ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.797480] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] fed2c050-74e7-48f1-8a19-7c58e26d2159/fed2c050-74e7-48f1-8a19-7c58e26d2159.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.801982] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef4fc63b-65c7-4473-9d5d-50c1f596c992 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.825510] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 999.825510] env[61545]: value = "task-4256226" [ 999.825510] env[61545]: _type = "Task" [ 999.825510] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.836603] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.868984] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256225, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.912337] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 999.942068] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b0c85290dc35c35c5b66997bba68c25f',container_format='bare',created_at=2025-06-03T12:52:08Z,direct_url=,disk_format='vmdk',id=59ed3cb3-d35d-4df7-8ff8-ffc462b36b78,min_disk=1,min_ram=0,name='tempest-test-snap-974547145',owner='9007a6e389c0467c8e2077309984eaab',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-06-03T12:52:25Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 999.942392] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.942559] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 999.943215] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.943215] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 999.943215] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 999.943375] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 999.943414] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 999.943574] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Got 1 possible topologies 
{{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 999.943766] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 999.944060] env[61545]: DEBUG nova.virt.hardware [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 999.945339] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139432de-6fb3-4930-a98a-04a03a2f8758 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.954316] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5faaad0-6ba4-48f1-98d8-e5a40f98a20d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.960329] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.960622] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42db8b43-ff27-456e-90b0-88abadea4813 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.980316] env[61545]: DEBUG oslo_vmware.api [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 999.980316] env[61545]: value = "task-4256227" [ 999.980316] env[61545]: _type = "Task" [ 999.980316] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.989781] env[61545]: DEBUG oslo_vmware.api [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.042729] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256222, 'name': CreateVM_Task, 'duration_secs': 0.769892} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.042888] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.043604] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'attachment_id': '7b0e596c-ba2d-488b-97ae-68450ccd7181', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838731', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'name': 'volume-11641b07-7823-42c5-8e71-d45453cc6704', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b', 'attached_at': '', 'detached_at': '', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'serial': '11641b07-7823-42c5-8e71-d45453cc6704'}, 'guest_format': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=61545) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1000.043821] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Root volume attach. 
Driver type: vmdk {{(pid=61545) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1000.045212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbc24a5-a690-4a4e-9036-be0ad25e2bc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.054213] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14691e08-728a-48f2-90f7-43ff16073d2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.061867] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aae66fa-8834-450e-a119-041b6808707f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.071240] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ce34edde-0735-4e0e-82b7-227087150c98 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.078592] env[61545]: DEBUG nova.scheduler.client.report [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.084795] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1000.084795] env[61545]: value = "task-4256228" [ 1000.084795] env[61545]: _type = "Task" [ 1000.084795] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.094520] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.209364] env[61545]: DEBUG oslo_concurrency.lockutils [req-447c0def-8c4b-412d-a3c7-cf5cf0ca2857 req-a60d9d5b-f2d9-40f2-84fe-632da770d5d4 service nova] Releasing lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.219891] env[61545]: DEBUG nova.network.neutron [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updated VIF entry in instance network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.220444] env[61545]: DEBUG nova.network.neutron [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [{"id": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "address": "fa:16:3e:3d:9f:82", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaacdaf9b-95", "ovs_interfaceid": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.259263] env[61545]: DEBUG nova.compute.manager [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.259551] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.260574] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfa1ce9-0495-43d4-b9f2-21b27ab55941 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.274521] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.274642] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1c486de-780e-482c-be1f-b307a98355ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.285050] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1000.285050] env[61545]: value = "task-4256229" [ 1000.285050] env[61545]: _type = "Task" [ 1000.285050] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.297941] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.341756] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256226, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.375064] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256225, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.498868] env[61545]: DEBUG oslo_vmware.api [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256227, 'name': PowerOffVM_Task, 'duration_secs': 0.212498} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.499406] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.499608] env[61545]: DEBUG nova.compute.manager [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.500912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e079162f-74d7-440c-a338-e5ac91d8151b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.586215] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.587352] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1000.590886] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.191s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.591195] env[61545]: DEBUG nova.objects.instance [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'resources' on Instance uuid fff833ad-55af-4702-859b-05f94cac18c8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.611192] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 40%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.691366] env[61545]: DEBUG nova.objects.instance [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.726030] env[61545]: DEBUG oslo_concurrency.lockutils [req-81bd5fda-9866-44d1-9eed-abbfb5c73b35 req-2a33846a-aa90-458b-8406-908825f03de1 service nova] Releasing lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.798572] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256229, 'name': PowerOffVM_Task, 'duration_secs': 0.304037} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.798934] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.799160] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.799566] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bf6b07d-1c87-4b4a-9ea5-67db257d5282 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.843115] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256226, 'name': ReconfigVM_Task, 'duration_secs': 0.6559} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.843115] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Reconfigured VM instance instance-0000004b to attach disk [datastore2] fed2c050-74e7-48f1-8a19-7c58e26d2159/fed2c050-74e7-48f1-8a19-7c58e26d2159.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.843115] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43af9743-e691-4e07-8001-37b80234ad5d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.856034] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 1000.856034] env[61545]: value = "task-4256232" [ 1000.856034] env[61545]: _type = "Task" [ 1000.856034] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.867493] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256232, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.877326] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256225, 'name': CloneVM_Task, 'duration_secs': 1.503562} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.877326] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Created linked-clone VM from snapshot [ 1000.877326] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcce4cd-5f96-46da-8398-53f3a8146be1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.887608] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Uploading image 3e5b9c96-51bb-41c7-82aa-57f618f730be {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1000.891859] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.891859] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.891859] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleting the datastore file [datastore2] 56680678-c844-4dd2-8541-d50de83b22d7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.892260] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e24e974b-412d-4d3f-92e1-7ef0c339dfe9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.903313] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1000.903313] env[61545]: value = "task-4256233" [ 1000.903313] env[61545]: _type = "Task" [ 1000.903313] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.910478] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1000.913391] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-94aa3b74-7723-463e-beae-a538092f9f1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.923346] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1000.923346] env[61545]: value = "task-4256234" [ 1000.923346] env[61545]: _type = "Task" [ 1000.923346] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.926955] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256233, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.941460] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256234, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.021277] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8ecb427d-f917-4691-9c8e-fc625bef61eb tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.091s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.111008] env[61545]: DEBUG nova.compute.utils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1001.113043] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 53%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.118498] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1001.118857] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.201288] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fb1e42ab-7ecf-4ce5-8f98-b91fd4174792 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.905s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.224278] env[61545]: DEBUG nova.policy [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9bcc01a701c4b728d810b0b27ce6249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeb51ace7650413b987be7ddd7490182', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1001.317964] env[61545]: INFO nova.compute.manager [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Rebuilding instance [ 1001.383080] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256232, 'name': Rename_Task, 'duration_secs': 0.19275} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.387305] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.395044] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8e252ee-17e3-487e-a6d3-3527d7cf9f14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.422721] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 1001.422721] env[61545]: value = "task-4256235" [ 1001.422721] env[61545]: _type = "Task" [ 1001.422721] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.451345] env[61545]: DEBUG oslo_vmware.api [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307858} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.456297] env[61545]: DEBUG nova.compute.manager [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.458018] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.458188] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.458489] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.458836] env[61545]: INFO nova.compute.manager [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1001.459413] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.460840] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21f41ce-f1bf-4e30-b9d9-3a268eac0ab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.470106] env[61545]: DEBUG nova.compute.manager [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.470279] env[61545]: DEBUG nova.network.neutron [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.490613] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "7301c541-664f-43ec-8a34-86f38cac22ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.491092] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.493016] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256235, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.493514] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256234, 'name': Destroy_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.610019] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 65%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.619145] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1001.713520] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.713870] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.714115] env[61545]: DEBUG nova.compute.manager [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.715647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f583a6-c2ad-4f25-94c1-27d89e2de2c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.728318] env[61545]: DEBUG nova.compute.manager [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1001.729170] env[61545]: DEBUG nova.objects.instance [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.775455] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52be0622-6cb2-40f4-88b7-11acd5b40ee2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.785517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af12a83-4a73-40fe-b5ca-2ba3a7ad8301 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.820064] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ae3b6f-f734-403d-99e5-f1ab8a544a23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.829017] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028b0748-7174-4b3b-ba4c-6e8a1d50d08f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.844440] env[61545]: DEBUG nova.compute.provider_tree [None 
req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.924586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.925059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.925272] env[61545]: INFO nova.compute.manager [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Shelving [ 1001.950254] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256235, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.957878] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256234, 'name': Destroy_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.000979] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.108768] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 78%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.127222] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Successfully updated port: ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.348776] env[61545]: DEBUG nova.scheduler.client.report [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.440439] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Successfully created port: 7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1002.455408] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256234, 'name': Destroy_Task, 'duration_secs': 1.504051} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.460680] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Destroyed the VM [ 1002.460990] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1002.461439] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256235, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.461867] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e9835072-65e6-42a4-acf5-f21e10f9bb96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.473714] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1002.473714] env[61545]: value = "task-4256236" [ 1002.473714] env[61545]: _type = "Task" [ 1002.473714] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.487861] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256236, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.515239] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.516454] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08aa70b9-df27-45c8-b2d9-5fc33fd530e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.531394] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1002.531394] env[61545]: value = "task-4256237" [ 1002.531394] env[61545]: _type = "Task" [ 1002.531394] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.537050] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.547936] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.558814] env[61545]: DEBUG nova.compute.manager [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Received event network-vif-plugged-ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1002.559286] env[61545]: DEBUG oslo_concurrency.lockutils [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] Acquiring lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.559627] env[61545]: DEBUG oslo_concurrency.lockutils [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.559816] env[61545]: DEBUG oslo_concurrency.lockutils [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.559987] env[61545]: DEBUG nova.compute.manager [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] No waiting events found dispatching network-vif-plugged-ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.560304] env[61545]: WARNING nova.compute.manager [req-d9ba6895-2a67-466f-978b-6244ff51f56d req-ae503e0f-8df9-482a-a466-d3d1d80cd650 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Received unexpected event network-vif-plugged-ac373ca4-eda8-462a-a658-52fa15cb8b0b for instance with vm_state building and task_state spawning. [ 1002.611454] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 92%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.630776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.631042] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.631235] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.638642] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1002.675879] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1002.675879] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.675879] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.675879] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.675879] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.676329] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1002.676329] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1002.676449] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1002.676682] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1002.676821] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1002.677051] env[61545]: DEBUG nova.virt.hardware [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1002.678261] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9f327b-2764-4879-9bae-abd7ad1b3701 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.693101] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f37b68-7ffa-4bbc-8aff-b585a5fe5efa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.737998] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.738357] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-43d9b003-c9dd-4040-8a0f-d98016e6e732 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.743065] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Volume attach. Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1002.743311] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838754', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'name': 'volume-dc427f98-25af-4605-aa76-6df488552e30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e5be92c-d727-4515-9e16-85ade2719455', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'serial': 'dc427f98-25af-4605-aa76-6df488552e30'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1002.744203] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeec0abe-f369-4fa9-a0aa-e52f472a1705 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.748825] env[61545]: DEBUG oslo_vmware.api [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1002.748825] env[61545]: value = "task-4256238" [ 1002.748825] env[61545]: _type = "Task" [ 1002.748825] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.768323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78480a6e-d98d-448f-b2d3-647492af5ff3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.775230] env[61545]: DEBUG oslo_vmware.api [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256238, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.796589] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.796898] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.797186] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.797435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.797646] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.807996] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] volume-dc427f98-25af-4605-aa76-6df488552e30/volume-dc427f98-25af-4605-aa76-6df488552e30.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.808796] env[61545]: INFO nova.compute.manager [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Terminating instance [ 1002.811287] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b25331c8-1fc9-4f28-800e-454375720e61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.836074] env[61545]: DEBUG oslo_vmware.api [None 
req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1002.836074] env[61545]: value = "task-4256239" [ 1002.836074] env[61545]: _type = "Task" [ 1002.836074] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.847411] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256239, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.860720] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.864899] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.671s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.864899] env[61545]: DEBUG nova.objects.instance [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lazy-loading 'resources' on Instance uuid eced4107-b99e-479e-b22c-2157320ecf95 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.900176] env[61545]: INFO nova.scheduler.client.report [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted allocations for instance fff833ad-55af-4702-859b-05f94cac18c8 [ 1002.939832] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256235, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.963658] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.963863] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51febc3d-e876-4676-b1fc-5187771df9f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.973847] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1002.973847] env[61545]: value = "task-4256240" [ 1002.973847] env[61545]: _type = "Task" [ 1002.973847] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.992103] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256240, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.995886] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256236, 'name': RemoveSnapshot_Task} progress is 56%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.042358] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256237, 'name': PowerOffVM_Task, 'duration_secs': 0.232039} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.042679] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.042911] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.043753] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0869b739-1cba-4562-bab3-5948c87973ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.052854] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.052854] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c199caa6-66f6-4a9f-bb1b-3176a98a1df9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.107919] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.130712] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.130965] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.131202] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.131717] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5577071-22bc-4040-b786-2cb9a2d33390 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.143498] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1003.143498] env[61545]: value = "task-4256242" [ 1003.143498] env[61545]: _type = "Task" [ 1003.143498] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.155827] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256242, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.264537] env[61545]: DEBUG oslo_vmware.api [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256238, 'name': PowerOffVM_Task, 'duration_secs': 0.31825} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.264537] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.264537] env[61545]: DEBUG nova.compute.manager [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.265315] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1c53c1-568a-453f-9576-3182401b2d0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.331663] env[61545]: DEBUG nova.compute.manager [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1003.331924] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.333176] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d868007f-0ba5-444b-8724-8082c963e848 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.353841] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.354177] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.355434] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.362546] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a45b404-4fbd-4368-a3e8-15671f616d8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.374507] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 1003.374507] env[61545]: value = "task-4256243" [ 1003.374507] env[61545]: _type = "Task" [ 1003.374507] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.387335] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.409213] env[61545]: DEBUG oslo_concurrency.lockutils [None req-411c98cf-3457-4662-8e2d-f6f7b3ea3ef9 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "fff833ad-55af-4702-859b-05f94cac18c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.606s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.443386] env[61545]: DEBUG oslo_vmware.api [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256235, 'name': PowerOnVM_Task, 'duration_secs': 1.590904} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.448532] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.448932] env[61545]: INFO nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Took 13.09 seconds to spawn the instance on the hypervisor. 
[ 1003.449265] env[61545]: DEBUG nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.452421] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9120047-d5b8-4b9f-9d57-1cd25e4af5cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.492048] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256236, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.496404] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256240, 'name': PowerOffVM_Task, 'duration_secs': 0.247187} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.496404] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.499045] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c044d5e3-86e6-4db5-90d1-62d572291f9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.523028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08475df9-4e56-4362-b90d-9f0ce735616c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.607552] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task} progress is 98%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.664315] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273816} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.666136] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.666136] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.666136] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.747683] env[61545]: DEBUG nova.network.neutron [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Updating instance_info_cache with network_info: [{"id": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "address": "fa:16:3e:fb:6e:82", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac373ca4-ed", "ovs_interfaceid": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.785915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bcfc000d-601b-44bf-b77d-7c465f5774cb tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.852406] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256239, 'name': ReconfigVM_Task, 'duration_secs': 0.952474} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.852725] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfigured VM instance instance-00000041 to attach disk [datastore1] volume-dc427f98-25af-4605-aa76-6df488552e30/volume-dc427f98-25af-4605-aa76-6df488552e30.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.860551] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9714602-7134-44fe-9321-ef7314345021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.871838] env[61545]: DEBUG nova.network.neutron [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.880853] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1003.880853] env[61545]: value = "task-4256244" [ 1003.880853] env[61545]: _type = "Task" [ 1003.880853] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.889804] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256243, 'name': PowerOffVM_Task, 'duration_secs': 0.244358} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.892127] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.892127] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.892446] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ffa8584-c07a-463f-936e-30021f0c9bba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.897960] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256244, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.992749] env[61545]: INFO nova.compute.manager [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Took 33.08 seconds to build instance. [ 1004.000047] env[61545]: DEBUG oslo_vmware.api [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256236, 'name': RemoveSnapshot_Task, 'duration_secs': 1.033696} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.000182] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1004.018755] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661f3ae2-3263-49e3-964b-db88392386fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.028770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1710e041-138a-4f61-b791-dbc625ef7058 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.061256] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1004.063114] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-184aa402-5ffd-4ced-8e72-cef2157c33b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.065602] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4272cc9e-4e24-4c2b-96ae-24c9992fd772 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.077284] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b91ad34-21a3-405e-8b78-d98964fc4eef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.082161] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1004.082161] env[61545]: value = "task-4256246" [ 1004.082161] env[61545]: _type = "Task" [ 1004.082161] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.095697] env[61545]: DEBUG nova.compute.provider_tree [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.105962] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256246, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.113879] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256228, 'name': RelocateVM_Task, 'duration_secs': 3.637425} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.114393] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Volume attach. Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1004.114591] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838731', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'name': 'volume-11641b07-7823-42c5-8e71-d45453cc6704', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b', 'attached_at': '', 'detached_at': '', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'serial': '11641b07-7823-42c5-8e71-d45453cc6704'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1004.115627] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf533e3-529a-428a-b20d-4846152a4e1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.135537] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdde018-ca74-4e66-b863-1fee73f946a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.138429] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.138743] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 
tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.138937] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleting the datastore file [datastore2] ced5bde7-07b9-4d07-8b13-49f6fb006eed {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.139260] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee1e6035-bbd7-46c8-8394-5dd459ec1f7c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.161839] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] volume-11641b07-7823-42c5-8e71-d45453cc6704/volume-11641b07-7823-42c5-8e71-d45453cc6704.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.163503] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3a57db8-6c7c-4535-bc0c-51841f070a6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.177734] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for the task: (returnval){ [ 1004.177734] env[61545]: value = "task-4256247" [ 1004.177734] env[61545]: _type = "Task" [ 1004.177734] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.188269] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1004.188269] env[61545]: value = "task-4256248" [ 1004.188269] env[61545]: _type = "Task" [ 1004.188269] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.194130] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256247, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.200649] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256248, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.251315] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.251645] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Instance network_info: |[{"id": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "address": "fa:16:3e:fb:6e:82", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac373ca4-ed", "ovs_interfaceid": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.252079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:6e:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac373ca4-eda8-462a-a658-52fa15cb8b0b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.259951] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.260253] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.260510] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59610fd5-1cad-468e-89a1-62418552d25a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.283009] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.283009] env[61545]: value = "task-4256249" [ 1004.283009] env[61545]: _type = "Task" [ 1004.283009] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.293293] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256249, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.376671] env[61545]: INFO nova.compute.manager [-] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Took 2.91 seconds to deallocate network for instance. [ 1004.399080] env[61545]: DEBUG oslo_vmware.api [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256244, 'name': ReconfigVM_Task, 'duration_secs': 0.172773} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.399533] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838754', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'name': 'volume-dc427f98-25af-4605-aa76-6df488552e30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e5be92c-d727-4515-9e16-85ade2719455', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'serial': 'dc427f98-25af-4605-aa76-6df488552e30'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1004.495191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-707cdeff-a2e2-48c2-b7ec-42506dee48bb tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.597s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.510180] env[61545]: WARNING nova.compute.manager [None req-ec1433cf-dbe7-42c0-9272-02eb4077eec4 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Image not found during snapshot: nova.exception.ImageNotFound: Image 3e5b9c96-51bb-41c7-82aa-57f618f730be could not be found. 
[ 1004.594045] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256246, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.603383] env[61545]: DEBUG nova.scheduler.client.report [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.704616] env[61545]: DEBUG oslo_vmware.api [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Task: {'id': task-4256247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272525} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.708993] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.709290] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.709568] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.709813] env[61545]: INFO nova.compute.manager [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Took 1.38 seconds to destroy the instance on the hypervisor. [ 1004.710144] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.710424] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256248, 'name': ReconfigVM_Task, 'duration_secs': 0.307119} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.710676] env[61545]: DEBUG nova.compute.manager [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.710874] env[61545]: DEBUG nova.network.neutron [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.712846] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Reconfigured VM instance instance-0000004c to attach disk [datastore2] volume-11641b07-7823-42c5-8e71-d45453cc6704/volume-11641b07-7823-42c5-8e71-d45453cc6704.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.719953] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c68d9ac8-6e05-4f63-a408-20b52ec61b63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.737645] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.737968] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.738141] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.738328] env[61545]: DEBUG nova.virt.hardware [None 
req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.738588] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.738753] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.739111] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.739215] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.739383] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.739640] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.739831] env[61545]: DEBUG nova.virt.hardware [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.740899] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc6d23b-a1b8-430c-9c9e-73cbb305e989 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.744876] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1004.744876] env[61545]: value = "task-4256250" [ 1004.744876] env[61545]: _type = "Task" [ 1004.744876] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.753099] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ebc6a1-b1df-4781-8ae9-dff0b56b41e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.762368] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256250, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.775984] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:ab:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd289bc06-c77a-460a-b15d-e94dcfb3ff53', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.785651] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.786121] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.789409] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-082c2d9d-00a4-460f-86c2-fc4025053fc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.809507] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256249, 'name': CreateVM_Task, 'duration_secs': 0.394472} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.810832] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.811040] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.811040] env[61545]: value = "task-4256251" [ 1004.811040] env[61545]: _type = "Task" [ 1004.811040] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.811784] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.812047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.812469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.812822] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-babc70e0-eea6-4df7-b357-3086874e8511 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.822021] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1004.822021] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524a8149-6841-c60b-a75d-f154943f56ea" [ 1004.822021] env[61545]: _type = "Task" [ 1004.822021] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.824980] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256251, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.838166] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.838473] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Processing image 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.838689] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.838842] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.839032] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.839306] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-113db6de-6ae2-4b67-9fc3-600f5d2eccdd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.851996] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.852105] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.853273] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b6b8fb-1115-418b-a5b1-eafd7e70268a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.860639] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1004.860639] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523ed7c7-1b71-562b-393f-2ab444d787f8" [ 1004.860639] env[61545]: _type = "Task" [ 1004.860639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.871882] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523ed7c7-1b71-562b-393f-2ab444d787f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.889184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.937183] env[61545]: DEBUG nova.compute.manager [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Received event network-changed-ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1004.937334] env[61545]: DEBUG nova.compute.manager [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Refreshing instance network info cache due to event network-changed-ac373ca4-eda8-462a-a658-52fa15cb8b0b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1004.937790] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] Acquiring lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.938152] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] Acquired lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.938895] env[61545]: DEBUG nova.network.neutron [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Refreshing network info cache for port ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.959982] env[61545]: DEBUG nova.objects.instance [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.043354] env[61545]: DEBUG nova.compute.manager [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Stashing vm_state: stopped {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1005.093564] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256246, 'name': CreateSnapshot_Task, 'duration_secs': 0.705391} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.093849] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1005.094650] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd7403a-569d-416b-ab0b-3cf42e7d036b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.110029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.112336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.597s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.112565] env[61545]: DEBUG nova.objects.instance [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'resources' on Instance uuid 5ba53915-ab57-493e-b2e1-7f3d1b3845ee {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.132604] env[61545]: INFO nova.scheduler.client.report [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Deleted allocations for instance eced4107-b99e-479e-b22c-2157320ecf95 [ 1005.257422] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256250, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.327104] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256251, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.373034] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1005.373581] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Fetch image to [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a/OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1005.376018] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Downloading stream optimized image 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 to [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a/OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a.vmdk on the data store datastore2 as vApp {{(pid=61545) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1005.376018] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Downloading image file data 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 to the ESX as VM named 'OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a' {{(pid=61545) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1005.466249] env[61545]: DEBUG oslo_concurrency.lockutils [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.466466] env[61545]: DEBUG oslo_concurrency.lockutils [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.467747] env[61545]: DEBUG nova.network.neutron [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.467747] env[61545]: DEBUG nova.objects.instance [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'info_cache' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.472473] env[61545]: DEBUG nova.objects.instance [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 
tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'flavor' on Instance uuid 1e5be92c-d727-4515-9e16-85ade2719455 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.480494] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1005.480494] env[61545]: value = "resgroup-9" [ 1005.480494] env[61545]: _type = "ResourcePool" [ 1005.480494] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1005.480811] env[61545]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-08fd0777-2473-408c-a7da-40753a86f9b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.510745] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Successfully updated port: 7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.513741] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease: (returnval){ [ 1005.513741] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1005.513741] env[61545]: _type = "HttpNfcLease" [ 1005.513741] env[61545]: } obtained for vApp import into resource pool (val){ [ 1005.513741] env[61545]: value = "resgroup-9" [ 1005.513741] env[61545]: _type = "ResourcePool" [ 1005.513741] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1005.514207] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the lease: (returnval){ [ 1005.514207] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1005.514207] env[61545]: _type = "HttpNfcLease" [ 1005.514207] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1005.521659] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1005.521659] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1005.521659] env[61545]: _type = "HttpNfcLease" [ 1005.521659] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1005.572771] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.617344] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1005.621897] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-645a8961-c5ce-4840-9f50-471786cce607 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.635354] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1005.635354] env[61545]: value = "task-4256253" [ 1005.635354] env[61545]: _type = "Task" [ 1005.635354] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.647232] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256253, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.648018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-36739a5f-ecd6-4406-9091-da329d4c7a76 tempest-MigrationsAdminTest-1836932771 tempest-MigrationsAdminTest-1836932771-project-member] Lock "eced4107-b99e-479e-b22c-2157320ecf95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.489s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.759812] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256250, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.830302] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256251, 'name': CreateVM_Task, 'duration_secs': 0.579409} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.833350] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1005.834320] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.834469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.834787] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1005.835084] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2baddf6-f768-40a1-a075-dad96010d7ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.840701] env[61545]: DEBUG nova.network.neutron [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.848904] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1005.848904] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5245339f-5906-37d4-6e78-4d11f48444e9" [ 1005.848904] env[61545]: _type = "Task" [ 1005.848904] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.864597] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5245339f-5906-37d4-6e78-4d11f48444e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.971134] env[61545]: DEBUG nova.network.neutron [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Updated VIF entry in instance network info cache for port ac373ca4-eda8-462a-a658-52fa15cb8b0b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.971523] env[61545]: DEBUG nova.network.neutron [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Updating instance_info_cache with network_info: [{"id": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "address": "fa:16:3e:fb:6e:82", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac373ca4-ed", "ovs_interfaceid": "ac373ca4-eda8-462a-a658-52fa15cb8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.973756] env[61545]: DEBUG nova.objects.base [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1005.983531] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c706331e-623c-42e4-a1c0-74330f31a533 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.394s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.017197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.020633] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.020633] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.030955] env[61545]: 
DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1006.030955] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1006.030955] env[61545]: _type = "HttpNfcLease" [ 1006.030955] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1006.114706] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b1929c-f580-4529-ac8f-00432fb83e30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.123988] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c731e713-b168-48cb-8bca-b73dc98a7d04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.164302] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eab89e1-7ce4-433d-abaf-d551a97d7d7b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.176607] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256253, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.178350] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d12350-6937-4017-9a85-77dec4db99a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.196449] env[61545]: DEBUG nova.compute.provider_tree [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.259317] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256250, 'name': ReconfigVM_Task, 'duration_secs': 1.208468} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.259633] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838731', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'name': 'volume-11641b07-7823-42c5-8e71-d45453cc6704', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b', 'attached_at': '', 'detached_at': '', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'serial': '11641b07-7823-42c5-8e71-d45453cc6704'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1006.260222] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84aed346-2cfe-42ee-b080-3161440086a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.268274] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1006.268274] env[61545]: value = "task-4256254" [ 1006.268274] env[61545]: _type = "Task" [ 1006.268274] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.278908] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256254, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.343454] env[61545]: INFO nova.compute.manager [-] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Took 1.63 seconds to deallocate network for instance. [ 1006.360883] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5245339f-5906-37d4-6e78-4d11f48444e9, 'name': SearchDatastore_Task, 'duration_secs': 0.021286} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.361241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.361496] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.361746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.361915] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.362580] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.362776] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d57fe0fd-72f3-4f98-9dce-ff326e0e1449 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.375827] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.375827] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.376457] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bdf4a9a-793c-42ff-93b5-663bbccd6508 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.382505] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1006.382505] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d15d9a-f02f-3062-bee8-be499c0ec7b3" [ 1006.382505] env[61545]: _type = "Task" [ 1006.382505] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.392559] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d15d9a-f02f-3062-bee8-be499c0ec7b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.476126] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] Releasing lock "refresh_cache-5a610b1c-df03-4ca6-83ff-ba651edcc8d0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.476371] env[61545]: DEBUG nova.compute.manager [req-7e35a753-357e-4189-b3da-fcaa019420e0 req-c6a7862e-2b6d-4746-b228-908e8074cf53 service nova] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Received event network-vif-deleted-af2c9b85-3238-4b4f-b74f-b72d7b575e73 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1006.532205] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1006.532205] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1006.532205] env[61545]: _type = "HttpNfcLease" [ 1006.532205] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1006.535241] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1006.535241] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528691cc-df2c-3257-1af0-e9389f19a200" [ 1006.535241] env[61545]: _type = "HttpNfcLease" [ 1006.535241] env[61545]: }. 
{{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1006.535600] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b291d6e-1c08-420c-82ca-7cd190dc81f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.546402] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1006.546402] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk. {{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1006.549948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "665db895-52ce-4e7c-9a78-86db5b695534" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.549948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.549948] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "665db895-52ce-4e7c-9a78-86db5b695534-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.550423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.552022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.623475] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.626360] env[61545]: INFO nova.compute.manager [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Terminating instance [ 1006.634109] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-50bce3b7-74ab-4227-840f-d81e6fbca124 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.673332] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256253, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.700670] env[61545]: DEBUG nova.scheduler.client.report [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.781043] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256254, 'name': Rename_Task, 'duration_secs': 0.152703} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.781312] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.781597] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f92dffa-49a1-44bf-968d-76e99aadfbba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.789440] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1006.789440] env[61545]: value = "task-4256255" [ 1006.789440] env[61545]: _type = "Task" [ 1006.789440] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.798815] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.831462] env[61545]: INFO nova.compute.manager [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Rescuing [ 1006.831778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.831971] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.832156] env[61545]: DEBUG nova.network.neutron [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.857579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.895339] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 
tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d15d9a-f02f-3062-bee8-be499c0ec7b3, 'name': SearchDatastore_Task, 'duration_secs': 0.021001} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.898794] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab8a4295-1eb3-4837-8fff-1a504f8dbacd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.906900] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.906900] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.907103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.907290] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.907570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.910390] env[61545]: INFO nova.compute.manager [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Terminating instance [ 1006.916898] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1006.916898] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b68ace-4ce2-2b4f-b24e-5f1406cab449" [ 
1006.916898] env[61545]: _type = "Task" [ 1006.916898] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.933262] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b68ace-4ce2-2b4f-b24e-5f1406cab449, 'name': SearchDatastore_Task, 'duration_secs': 0.013344} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.936720] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.937589] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.938329] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2687c712-2c53-45a7-946f-49c5c953168e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.956464] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1006.956464] env[61545]: value = "task-4256256" [ 1006.956464] env[61545]: _type = "Task" [ 1006.956464] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.969195] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256256, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.041536] env[61545]: DEBUG nova.network.neutron [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updating instance_info_cache with network_info: [{"id": "7e18e278-e525-407c-90fa-107184503c1c", "address": "fa:16:3e:c8:0e:6d", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e18e278-e5", "ovs_interfaceid": "7e18e278-e525-407c-90fa-107184503c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.134796] env[61545]: DEBUG nova.compute.manager [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.135124] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.136163] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd26b0cd-ad1b-4b02-9e80-4228ad3c210e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.148404] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.151082] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3275d23-0dcf-4e4e-ae9d-3d8ddde4efce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.159093] env[61545]: DEBUG nova.network.neutron [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.167546] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1007.167546] env[61545]: value = "task-4256257" [ 1007.167546] env[61545]: _type = "Task" [ 1007.167546] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.184894] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256253, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.194231] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.208148] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.211893] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.852s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.211893] env[61545]: DEBUG nova.objects.instance [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1007.256674] env[61545]: INFO nova.scheduler.client.report [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance 5ba53915-ab57-493e-b2e1-7f3d1b3845ee [ 1007.310293] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256255, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.381183] env[61545]: DEBUG nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Received event network-vif-plugged-7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1007.381183] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Acquiring lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.381183] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.381413] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.381566] env[61545]: DEBUG nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] No waiting events found dispatching network-vif-plugged-7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1007.381790] env[61545]: WARNING nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Received unexpected event network-vif-plugged-7e18e278-e525-407c-90fa-107184503c1c for instance with vm_state building and task_state spawning. [ 1007.381925] env[61545]: DEBUG nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Received event network-vif-deleted-16667e69-57e6-426e-8b7e-0da6159f84bb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1007.382136] env[61545]: DEBUG nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Received event network-changed-7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1007.382330] env[61545]: DEBUG nova.compute.manager [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Refreshing instance network info cache due to event network-changed-7e18e278-e525-407c-90fa-107184503c1c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1007.385392] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Acquiring lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.424230] env[61545]: DEBUG nova.compute.manager [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.424230] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.425861] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401338cf-2bb9-4037-a4ed-fb25cad22239 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.439181] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.440907] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bed1372-1852-4aaa-b314-7564a39f132e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.450283] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 1007.450283] env[61545]: value = "task-4256258" [ 1007.450283] env[61545]: _type = "Task" [ 1007.450283] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.468055] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.475950] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256256, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.545042] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.545707] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Instance network_info: |[{"id": "7e18e278-e525-407c-90fa-107184503c1c", "address": "fa:16:3e:c8:0e:6d", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e18e278-e5", "ovs_interfaceid": "7e18e278-e525-407c-90fa-107184503c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1007.546268] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Acquired lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.546771] env[61545]: DEBUG nova.network.neutron [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Refreshing network info cache for port 7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.552023] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:0e:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e18e278-e525-407c-90fa-107184503c1c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.558445] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 
tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.560281] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.560439] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79892d50-279e-464b-a090-ad817273319e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.587808] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.587808] env[61545]: value = "task-4256259" [ 1007.587808] env[61545]: _type = "Task" [ 1007.587808] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.601667] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256259, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.604487] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Completed reading data from the image iterator. {{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1007.604894] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1007.606391] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d392d35-acc1-408e-8760-81afb58319b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.616510] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1007.617145] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1007.617145] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-77f432bf-875b-4e40-a8fc-6b43b1e8eae6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.675286] env[61545]: DEBUG oslo_concurrency.lockutils [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.677246] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256253, 'name': CloneVM_Task, 'duration_secs': 1.73647} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.684229] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Created linked-clone VM from snapshot [ 1007.687878] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc9f065-7c09-43e7-af6c-e08ffcedf28d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.701271] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256257, 'name': PowerOffVM_Task, 'duration_secs': 0.454897} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.707134] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.707407] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.708372] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Uploading image 5bc7010a-ee45-48d1-87c9-e47216be1200 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1007.711603] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34cfe9f3-cc62-4c9b-86db-fba8915569fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.750834] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1007.750834] env[61545]: value = "vm-838763" [ 1007.750834] env[61545]: _type = "VirtualMachine" [ 1007.750834] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1007.751288] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2c078150-e0a3-4c39-b3a7-501f5bb51607 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.761212] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease: (returnval){ [ 1007.761212] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9ac5e-664c-0886-a353-96bc49fa3670" [ 1007.761212] env[61545]: _type = "HttpNfcLease" [ 1007.761212] env[61545]: } obtained for exporting VM: (result){ [ 1007.761212] env[61545]: value = "vm-838763" [ 1007.761212] env[61545]: _type = "VirtualMachine" [ 1007.761212] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1007.761642] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the lease: (returnval){ [ 1007.761642] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9ac5e-664c-0886-a353-96bc49fa3670" [ 1007.761642] env[61545]: _type = "HttpNfcLease" [ 1007.761642] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1007.768937] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4fb25fb8-f143-4e5a-b681-04e3b64f1113 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "5ba53915-ab57-493e-b2e1-7f3d1b3845ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.433s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.772714] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1007.772714] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9ac5e-664c-0886-a353-96bc49fa3670" [ 1007.772714] env[61545]: _type = "HttpNfcLease" [ 1007.772714] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1007.782115] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.782403] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.782614] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleting the datastore file [datastore2] 665db895-52ce-4e7c-9a78-86db5b695534 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.782892] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71b5f3a8-7d69-4eb6-be86-05e92149a64c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.791733] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1007.791733] env[61545]: value = "task-4256262" [ 1007.791733] env[61545]: _type = "Task" [ 1007.791733] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.809862] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.813094] env[61545]: DEBUG oslo_vmware.api [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256255, 'name': PowerOnVM_Task, 'duration_secs': 0.598228} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.813380] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.813610] env[61545]: INFO nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Took 10.76 seconds to spawn the instance on the hypervisor. [ 1007.813795] env[61545]: DEBUG nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.814692] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57dfcd6-57db-43a8-b9a7-0e7e87ec0316 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.832022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.832022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.832022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.832022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.832022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.839422] env[61545]: INFO nova.compute.manager [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Terminating instance [ 1007.851323] env[61545]: DEBUG oslo_vmware.rw_handles [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52320ce8-d92d-0f69-5187-004596132688/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1007.851657] env[61545]: INFO nova.virt.vmwareapi.images [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Downloaded image file data 59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 [ 1007.853387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6ed212-1616-41a2-bea4-5a7a8337ee73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.875636] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26a12a1d-e3d8-46f7-8971-074d64b95acb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.912938] env[61545]: INFO nova.virt.vmwareapi.images [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] The imported VM was unregistered [ 1007.916022] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Caching image {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1007.916425] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.917232] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48195efb-1e0a-41a5-bd84-97fa8a111d3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.931293] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.931293] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a/OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a.vmdk to [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk. {{(pid=61545) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1007.931566] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1d941656-77d5-446b-b23d-2b1cb0c379b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.939745] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1007.939745] env[61545]: value = "task-4256264" [ 1007.939745] env[61545]: _type = "Task" [ 1007.939745] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.945724] env[61545]: DEBUG nova.network.neutron [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.954043] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.960834] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256258, 'name': PowerOffVM_Task, 'duration_secs': 0.254246} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.964587] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.964927] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.965770] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26833822-af50-4326-9c50-8781b4d099cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.974306] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601048} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.974306] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.974306] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.974306] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e0723f7-60b7-4fdf-aece-41cd0fadf8db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.982756] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1007.982756] env[61545]: value = "task-4256266" [ 1007.982756] env[61545]: _type = "Task" [ 1007.982756] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.996037] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.038377] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.038554] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.038670] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Deleting the datastore file [datastore2] fed2c050-74e7-48f1-8a19-7c58e26d2159 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.038962] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08777bd7-bcd5-4275-9c95-070452e4de88 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.047318] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for the task: (returnval){ [ 1008.047318] env[61545]: value = "task-4256267" [ 1008.047318] env[61545]: _type = "Task" [ 1008.047318] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.059077] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.100477] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256259, 'name': CreateVM_Task, 'duration_secs': 0.419491} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.100626] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.101718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.101879] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.102220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.102497] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68da112-50b3-4924-bbec-6716156c48c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.110489] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1008.110489] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c3954a-eee5-f181-3b0c-0a14da4b8310" [ 1008.110489] env[61545]: _type = "Task" [ 1008.110489] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.119637] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c3954a-eee5-f181-3b0c-0a14da4b8310, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.230887] env[61545]: DEBUG oslo_concurrency.lockutils [None req-928c80c1-a88e-45a3-a4a5-f43b80864714 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.236027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.695s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.238584] env[61545]: INFO nova.compute.claims [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.273339] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1008.273339] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9ac5e-664c-0886-a353-96bc49fa3670" [ 1008.273339] env[61545]: _type = "HttpNfcLease" [ 1008.273339] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1008.273339] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1008.273339] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a9ac5e-664c-0886-a353-96bc49fa3670" [ 1008.273339] env[61545]: _type = "HttpNfcLease" [ 1008.273339] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1008.274014] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07e4493-bdbc-41f0-a630-09272e6cd8a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.296318] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1008.296593] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk for reading. 
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1008.371064] env[61545]: DEBUG nova.compute.manager [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.371064] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.379499] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6676fc85-be83-4751-be5a-1db28adfe059 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.383469] env[61545]: DEBUG oslo_vmware.api [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214142} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.383732] env[61545]: INFO nova.compute.manager [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Took 24.17 seconds to build instance. [ 1008.385546] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.385759] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.385947] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.386130] env[61545]: INFO nova.compute.manager [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Took 1.25 seconds to destroy the instance on the hypervisor. 
[ 1008.386376] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.388059] env[61545]: DEBUG nova.compute.manager [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1008.388059] env[61545]: DEBUG nova.network.neutron [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.394478] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.394798] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd1e06ca-2fe7-4d9f-99bf-d2508053dcf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.403510] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 1008.403510] env[61545]: value = "task-4256268" [ 1008.403510] env[61545]: _type = "Task" [ 1008.403510] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.415952] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256268, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.451374] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.453244] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.497909] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088351} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.499046] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.500196] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba99934-30bb-440e-9f0d-6e98d8016940 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.527263] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.530796] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff6d2bbf-6e5a-404f-b5d6-a6790b301146 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.553648] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1008.553648] env[61545]: value = "task-4256269" [ 1008.553648] env[61545]: _type = "Task" [ 1008.553648] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.564958] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.570471] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256269, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.623041] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c3954a-eee5-f181-3b0c-0a14da4b8310, 'name': SearchDatastore_Task, 'duration_secs': 0.011205} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.623347] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.623631] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.623916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.624121] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.624336] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.624722] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e81029d-11e4-42d1-a8a5-c97f1764d971 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.646963] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.647340] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.648408] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b20466ea-c48f-459e-a78b-0141fd6b2c56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.655706] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1008.655706] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52733eb1-d1fd-63fe-bf6d-e1c065d1170e" [ 1008.655706] env[61545]: _type = "Task" [ 1008.655706] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.661403] env[61545]: DEBUG nova.network.neutron [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updated VIF entry in instance network info cache for port 7e18e278-e525-407c-90fa-107184503c1c. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.662048] env[61545]: DEBUG nova.network.neutron [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updating instance_info_cache with network_info: [{"id": "7e18e278-e525-407c-90fa-107184503c1c", "address": "fa:16:3e:c8:0e:6d", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e18e278-e5", "ovs_interfaceid": "7e18e278-e525-407c-90fa-107184503c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.669694] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52733eb1-d1fd-63fe-bf6d-e1c065d1170e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.682766] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.683729] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-713c542e-8ba4-4423-86f0-4f1e947ad8a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.692489] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1008.692489] env[61545]: value = "task-4256270" [ 1008.692489] env[61545]: _type = "Task" [ 1008.692489] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.704497] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256270, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.786962] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0dc31ad0-212c-4f4f-8f1a-e1a125de466b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.893762] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f6b282cd-d64b-48ea-9eeb-f0aa4a64ca7f tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 25.695s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.917436] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256268, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.954850] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.067184] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256267, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.078825] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.172024] env[61545]: DEBUG oslo_concurrency.lockutils [req-bf6c12dc-56e9-4387-9fc1-2e8961234dec req-892496b1-242b-4642-9b98-2a272512a263 service nova] Releasing lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.172024] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52733eb1-d1fd-63fe-bf6d-e1c065d1170e, 'name': SearchDatastore_Task, 'duration_secs': 0.088308} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.172897] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e9f68e-cbde-4d6f-a8c4-ef4c339425b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.182248] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1009.182248] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522efc8a-0523-69ab-22fd-ddc560d0501e" [ 1009.182248] env[61545]: _type = "Task" [ 1009.182248] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.198020] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522efc8a-0523-69ab-22fd-ddc560d0501e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.209145] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.424724] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256268, 'name': PowerOffVM_Task, 'duration_secs': 0.913859} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.424850] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.425019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.425525] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1a1fd75-53a8-4cd6-a498-22176621c04c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.466495] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.576217] env[61545]: DEBUG oslo_vmware.api [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Task: {'id': task-4256267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.045356} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.583362] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.583588] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.583853] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.584892] env[61545]: INFO nova.compute.manager [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Took 2.16 seconds to destroy the instance on the hypervisor. 
[ 1009.584892] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.584892] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.585354] env[61545]: DEBUG nova.compute.manager [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.585461] env[61545]: DEBUG nova.network.neutron [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.698087] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522efc8a-0523-69ab-22fd-ddc560d0501e, 'name': SearchDatastore_Task, 'duration_secs': 0.104882} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.707202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.707202] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 855904d8-7eb3-405d-9236-ab4ba9b33940/855904d8-7eb3-405d-9236-ab4ba9b33940.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.707835] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d6150f2-cb7d-43fd-8989-e2ed38250d79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.722027] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256270, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.722027] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1009.722027] env[61545]: value = "task-4256272" [ 1009.722027] env[61545]: _type = "Task" [ 1009.722027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.731887] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.751174] env[61545]: DEBUG nova.network.neutron [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.869023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65f8c54-2640-4e50-81d3-89e199eec25d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.880325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950175b6-c8a7-40ba-9408-bc97d8d67919 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.913980] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325e3569-d2dc-4195-ad2f-2e18b8c10146 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.923068] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd6a777-cb51-4963-84ae-b9a880cea892 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.940505] env[61545]: DEBUG nova.compute.provider_tree [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.960887] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.000076] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.001676] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3470eb5-8225-4af6-9f7d-5692648973bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.009489] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1010.009489] env[61545]: value = "task-4256273" [ 1010.009489] env[61545]: _type = "Task" [ 1010.009489] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.022739] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.074682] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256269, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.170394] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.170394] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.170394] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleting the datastore file [datastore2] 73df6df0-ead6-49cd-8b0a-5e95acfc7e15 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.170394] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a62d959-c113-4889-a202-52c2e44035f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.185954] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for the task: (returnval){ [ 1010.185954] env[61545]: value = "task-4256274" [ 1010.185954] env[61545]: _type = "Task" [ 1010.185954] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.200550] env[61545]: DEBUG nova.compute.manager [req-8b583fa5-b97a-4e5d-963b-9c35163549e0 req-c919b8bf-ef30-4a6e-8584-9a448093f577 service nova] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Received event network-vif-deleted-2019c08e-1b99-4d7b-96f0-32e559d30daf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1010.214439] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.220157] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256270, 'name': PowerOnVM_Task} progress is 70%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.234188] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.254788] env[61545]: INFO nova.compute.manager [-] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Took 1.87 seconds to deallocate network for instance. [ 1010.445200] env[61545]: DEBUG nova.scheduler.client.report [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.459577] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.521708] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.577074] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256269, 'name': ReconfigVM_Task, 'duration_secs': 1.590166} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.577417] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc/7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.578420] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f385e9c-31fd-47fe-b9fb-203abb22b564 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.591027] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1010.591027] env[61545]: value = "task-4256275" [ 1010.591027] env[61545]: _type = "Task" [ 1010.591027] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.602201] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256275, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.606317] env[61545]: DEBUG nova.compute.manager [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Received event network-changed-fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1010.606317] env[61545]: DEBUG nova.compute.manager [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Refreshing instance network info cache due to event network-changed-fab93887-61ce-41f0-a531-d540740b5acd. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1010.606568] env[61545]: DEBUG oslo_concurrency.lockutils [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] Acquiring lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.606716] env[61545]: DEBUG oslo_concurrency.lockutils [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] Acquired lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.606874] env[61545]: DEBUG nova.network.neutron [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Refreshing network info cache for port fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.692913] env[61545]: DEBUG nova.network.neutron [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.715605] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.723876] env[61545]: DEBUG oslo_vmware.api [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256270, 'name': PowerOnVM_Task, 'duration_secs': 1.99754} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.728908] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.729272] env[61545]: DEBUG nova.compute.manager [None req-229fe942-6dde-4422-9dda-849f256fb6a1 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.731369] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468cbec6-9512-4ba7-ab7a-bf5f757a730e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.743429] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.763074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.957344] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.957344] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1010.962557] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.073s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.962557] env[61545]: DEBUG nova.objects.instance [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'resources' on Instance uuid 56680678-c844-4dd2-8541-d50de83b22d7 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.972654] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256264, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.726253} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.973867] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a/OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a.vmdk to [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk. [ 1010.975105] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Cleaning up location [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1010.975912] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b8c1c256-44a1-47f0-9e83-19c5cc92d47a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.976946] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11d13595-77ab-4476-9b61-521bbaffdaa5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.987679] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1010.987679] env[61545]: value = "task-4256276" [ 1010.987679] env[61545]: _type = "Task" [ 1010.987679] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.000791] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256276, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.009403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-df07b66e-5121-488f-a0ea-61c61f94b0ac tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "3cff109e-3cb9-4369-987d-9821a858029a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.009665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-df07b66e-5121-488f-a0ea-61c61f94b0ac tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "3cff109e-3cb9-4369-987d-9821a858029a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.026253] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256273, 'name': PowerOffVM_Task, 'duration_secs': 0.658471} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.027053] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.028565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d541cab-e842-4a2b-8e60-5ace4a3976f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.058129] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721999b1-eebc-4d8c-9fc3-7e7d85e30e13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.104910] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256275, 'name': Rename_Task, 'duration_secs': 0.184084} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.109041] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.109041] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.109041] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66d2a814-516d-4d7c-bccb-f2ed0859231d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.109998] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fbb54d3-be6d-4594-b4e1-689861ab8934 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.118812] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1011.118812] env[61545]: value = "task-4256277" [ 1011.118812] env[61545]: _type = "Task" [ 1011.118812] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.120726] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1011.120726] env[61545]: value = "task-4256278" [ 1011.120726] env[61545]: _type = "Task" [ 1011.120726] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.133922] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256278, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.138282] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1011.138516] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.138843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.139088] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.139216] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.139564] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccdc983e-d71f-4f69-8e1e-a95d4ef046b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.151728] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.151937] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.152744] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e4e48b-0b55-4474-9b12-163f421b444c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.163452] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1011.163452] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522c46c7-f221-f035-2eb3-1a2dcfae34a7" [ 1011.163452] env[61545]: _type = "Task" [ 1011.163452] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.177741] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522c46c7-f221-f035-2eb3-1a2dcfae34a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.199595] env[61545]: INFO nova.compute.manager [-] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Took 1.61 seconds to deallocate network for instance. [ 1011.216363] env[61545]: DEBUG oslo_vmware.api [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Task: {'id': task-4256274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.990516} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.216902] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.217100] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.217277] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.217444] env[61545]: INFO nova.compute.manager [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Took 2.85 seconds to destroy the instance on the hypervisor. 
[ 1011.217728] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1011.217929] env[61545]: DEBUG nova.compute.manager [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.218031] env[61545]: DEBUG nova.network.neutron [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.234323] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.467393] env[61545]: DEBUG nova.compute.utils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.471720] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.471842] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.509881] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187267} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.510215] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.510449] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.510798] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk to [datastore2] 5a610b1c-df03-4ca6-83ff-ba651edcc8d0/5a610b1c-df03-4ca6-83ff-ba651edcc8d0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1011.511059] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1d6a4be-34b5-44bd-a012-4850778b498a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.517224] env[61545]: DEBUG nova.compute.manager [None req-df07b66e-5121-488f-a0ea-61c61f94b0ac tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 3cff109e-3cb9-4369-987d-9821a858029a] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1011.526836] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1011.526836] env[61545]: value = "task-4256279" [ 1011.526836] env[61545]: _type = "Task" [ 1011.526836] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.546740] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.643241] env[61545]: DEBUG oslo_vmware.api [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256278, 'name': PowerOnVM_Task, 'duration_secs': 0.512921} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.644377] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.644377] env[61545]: DEBUG nova.compute.manager [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.646280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540bb0c0-09f4-48d5-8f65-642123bdae05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.654683] env[61545]: DEBUG nova.policy [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa6cfd8bd3e74ef4a0ab2bbacf957e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc37508ff05143799aadd79ca75e546f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.675995] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522c46c7-f221-f035-2eb3-1a2dcfae34a7, 'name': SearchDatastore_Task, 'duration_secs': 0.01879} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.676846] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ece0142-a57e-48bc-9d1e-8abf4136077f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.684736] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1011.684736] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290d611-91cc-9642-b2ba-c0fdf89873b8" [ 1011.684736] env[61545]: _type = "Task" [ 1011.684736] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.696201] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290d611-91cc-9642-b2ba-c0fdf89873b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.719630] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.738388] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.933177] env[61545]: DEBUG nova.network.neutron [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updated VIF entry in instance network info cache for port fab93887-61ce-41f0-a531-d540740b5acd. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.933177] env[61545]: DEBUG nova.network.neutron [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating instance_info_cache with network_info: [{"id": "fab93887-61ce-41f0-a531-d540740b5acd", "address": "fa:16:3e:3c:af:43", "network": {"id": "32d6e817-cd7d-4b1a-bbf6-470349a1b3aa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1302718488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc2e8ab1dea400ca086a3039117cbff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfab93887-61", "ovs_interfaceid": "fab93887-61ce-41f0-a531-d540740b5acd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.941917] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b702c08c-32d4-4ebb-9b52-efdcc98c6749 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.953605] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc3322c-e454-49c6-b473-b3d1eaa5437f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.989683] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d 
tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1011.993820] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14de0fe7-07b4-48f9-95c6-7754acda4852 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.003907] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f061a9c9-e864-482d-8fbc-6c8e6cfbacc4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.020507] env[61545]: DEBUG nova.compute.provider_tree [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.022140] env[61545]: DEBUG nova.compute.manager [None req-df07b66e-5121-488f-a0ea-61c61f94b0ac tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 3cff109e-3cb9-4369-987d-9821a858029a] Instance disappeared before build. {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1012.039624] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.178738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.195919] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5290d611-91cc-9642-b2ba-c0fdf89873b8, 'name': SearchDatastore_Task, 'duration_secs': 0.028012} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.196234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.196509] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. {{(pid=61545) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1012.196805] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-234b028b-a4a5-4adb-8506-88ea7dd8a4e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.204359] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1012.204359] env[61545]: value = "task-4256280" [ 1012.204359] env[61545]: _type = "Task" [ 1012.204359] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.213792] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.236063] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256272, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.471106} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.236063] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 855904d8-7eb3-405d-9236-ab4ba9b33940/855904d8-7eb3-405d-9236-ab4ba9b33940.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.236063] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.236063] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9216c455-a0e9-478c-8f1a-74911f774b14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.246034] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1012.246034] env[61545]: value = "task-4256281" [ 1012.246034] env[61545]: _type = "Task" [ 1012.246034] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.257799] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256281, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.437391] env[61545]: DEBUG oslo_concurrency.lockutils [req-fc444627-ebe4-4767-98db-cc53e8930ce2 req-189fe22f-b9fd-42b4-958b-84d5dd3377d2 service nova] Releasing lock "refresh_cache-04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.524737] env[61545]: DEBUG nova.scheduler.client.report [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.548018] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.550896] env[61545]: DEBUG oslo_concurrency.lockutils [None req-df07b66e-5121-488f-a0ea-61c61f94b0ac tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "3cff109e-3cb9-4369-987d-9821a858029a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.541s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.720165] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256280, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.756976] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256281, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072183} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.758121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.759654] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1346566c-c900-4366-9066-ca21df175b1a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.786452] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 855904d8-7eb3-405d-9236-ab4ba9b33940/855904d8-7eb3-405d-9236-ab4ba9b33940.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.786901] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80933fcc-1839-4bcc-a629-e3a168798832 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.813376] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1012.813376] env[61545]: value = "task-4256282" [ 1012.813376] env[61545]: _type = "Task" [ 1012.813376] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.825473] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.980938] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Successfully created port: c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.004449] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1013.032783] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.071s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.044133] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.471s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.052157] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1013.055261] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.055261] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.055261] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d 
tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.055261] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.055261] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1013.055696] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1013.055696] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1013.055836] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1013.055998] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1013.056311] env[61545]: DEBUG nova.virt.hardware [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1013.059036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9096de8-3ec1-45dc-b32c-6e8f42b5b4e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.081480] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314c890d-3ca2-4a1f-a379-39c49aac06c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.086776] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.087911] env[61545]: INFO nova.scheduler.client.report [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted allocations for instance 56680678-c844-4dd2-8541-d50de83b22d7 [ 1013.193884] env[61545]: DEBUG nova.compute.manager [req-6701dd8d-0a46-49a9-860d-ab752fe1e0fc req-c39ce720-fdec-442a-975a-9ba40dcd6eee service nova] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Received event network-vif-deleted-039bac9c-919b-4727-8313-ea1206afb5ec {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1013.222658] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256280, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.330057] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.480183] env[61545]: DEBUG nova.network.neutron [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.552336] env[61545]: INFO nova.compute.claims [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.558047] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.602336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f9bdec80-8df0-4462-846f-30b8f654a024 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "56680678-c844-4dd2-8541-d50de83b22d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.856s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.721768] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256280, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.828342] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.985836] env[61545]: INFO nova.compute.manager [-] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Took 2.77 seconds to deallocate network for instance. [ 1014.046466] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.060516] env[61545]: INFO nova.compute.resource_tracker [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating resource usage from migration e866c452-7774-4a83-9c60-8d591a4adec9 [ 1014.227274] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256280, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.683095} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.227695] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk. [ 1014.229228] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8059c380-cc5a-4118-90b2-1f8fb97c7502 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.264395] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.267883] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-453f46f0-a771-4dde-8533-8d6a09b048a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.293997] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1014.293997] env[61545]: value = "task-4256283" [ 1014.293997] env[61545]: _type = "Task" [ 1014.293997] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.307700] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.307975] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.315408] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256283, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.335664] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.502309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.552525] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.604769] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260b0ac8-af0e-4161-ad22-93268e1b6b9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.617684] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ee341a-dfee-4910-a94f-98a0bb53f02b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.662698] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fef7499-3c3c-4ea1-b22e-68d9a623a28b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.675974] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4518b3-22a3-4c6a-8d77-42a00e9d9ef5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.699123] env[61545]: DEBUG nova.compute.provider_tree [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.807357] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256283, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.815468] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1014.833230] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.046526] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256279, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.411622} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.046824] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78/59ed3cb3-d35d-4df7-8ff8-ffc462b36b78.vmdk to [datastore2] 5a610b1c-df03-4ca6-83ff-ba651edcc8d0/5a610b1c-df03-4ca6-83ff-ba651edcc8d0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.047943] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90124f97-71f5-4d36-bdf9-c840f885774e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.076884] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 5a610b1c-df03-4ca6-83ff-ba651edcc8d0/5a610b1c-df03-4ca6-83ff-ba651edcc8d0.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.077324] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb093d77-b3d4-4a19-91cd-dc29f68e9658 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.101649] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1015.101649] env[61545]: value = "task-4256284" [ 1015.101649] env[61545]: _type = "Task" [ 1015.101649] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.112067] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256284, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.184690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.187592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.203974] env[61545]: DEBUG nova.scheduler.client.report [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.285537] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Successfully updated port: c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.309951] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256283, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.338959] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256282, 'name': ReconfigVM_Task, 'duration_secs': 2.405774} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.339505] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 855904d8-7eb3-405d-9236-ab4ba9b33940/855904d8-7eb3-405d-9236-ab4ba9b33940.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.340292] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8985f20-fbf1-4206-89f5-85f82a462c9c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.348992] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1015.348992] env[61545]: value = "task-4256285" [ 1015.348992] env[61545]: _type = "Task" [ 1015.348992] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.350449] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.360257] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256285, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.527638] env[61545]: DEBUG nova.compute.manager [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Received event network-vif-deleted-b310c98a-9de2-40bc-a430-b4d1724a069b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1015.527638] env[61545]: DEBUG nova.compute.manager [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Received event network-vif-plugged-c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1015.528058] env[61545]: DEBUG oslo_concurrency.lockutils [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] Acquiring lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.529494] env[61545]: DEBUG oslo_concurrency.lockutils [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] Lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.530165] env[61545]: DEBUG oslo_concurrency.lockutils [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] Lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.530165] env[61545]: DEBUG nova.compute.manager [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] No waiting events found dispatching network-vif-plugged-c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.530165] env[61545]: WARNING nova.compute.manager [req-f0857c2d-e646-4533-a5bc-badc1ce8a92a req-b53277bb-1ace-4726-a62b-163132a8b616 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Received unexpected event network-vif-plugged-c9965f3c-2499-49d5-ba57-a093571cfc9e for instance with vm_state building and task_state spawning. [ 1015.619548] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256284, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.687843] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1015.709478] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.665s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.709724] env[61545]: INFO nova.compute.manager [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Migrating [ 1015.716752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.859s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.717063] env[61545]: DEBUG nova.objects.instance [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lazy-loading 'resources' on Instance uuid ced5bde7-07b9-4d07-8b13-49f6fb006eed {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.791827] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.791972] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquired lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.792145] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.810034] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256283, 'name': ReconfigVM_Task, 'duration_secs': 1.193039} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.810399] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455/bf68eb43-6d66-4532-9eb1-af7d78faa698-rescue.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.811087] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f98e0e0-7d0d-4b5a-a83e-bf375f72273a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.845594] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b2e9dd9-68e0-4516-a64f-aca4d4b427fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.868415] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256285, 'name': Rename_Task, 'duration_secs': 0.289956} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.870041] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.870417] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1015.870417] env[61545]: value = "task-4256286" [ 1015.870417] env[61545]: _type = "Task" [ 1015.870417] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.870660] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a7aa090-dc05-46b5-b08d-0bd6454c351f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.882315] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1015.882315] env[61545]: value = "task-4256287" [ 1015.882315] env[61545]: _type = "Task" [ 1015.882315] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.886834] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256286, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.096319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "d980f421-03b5-4b0e-b547-a33031356d55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.096745] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.097031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "d980f421-03b5-4b0e-b547-a33031356d55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.097269] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.097447] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.101166] env[61545]: INFO nova.compute.manager [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Terminating instance [ 1016.118350] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256284, 'name': ReconfigVM_Task, 'duration_secs': 0.822115} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.118350] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 5a610b1c-df03-4ca6-83ff-ba651edcc8d0/5a610b1c-df03-4ca6-83ff-ba651edcc8d0.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.118350] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e2dd971-6460-465d-a2b3-9616effe727d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.128512] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1016.128512] env[61545]: value = "task-4256288" [ 1016.128512] env[61545]: _type = "Task" [ 1016.128512] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.143231] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256288, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.214903] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.229730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.229991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.230197] env[61545]: DEBUG nova.network.neutron [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.368561] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.386459] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256286, 'name': ReconfigVM_Task, 'duration_secs': 0.375737} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.388030] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.390373] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27aa7a5e-7fef-4280-979c-66eb2003c8da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.405192] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1016.405192] env[61545]: value = "task-4256289" [ 1016.405192] env[61545]: _type = "Task" [ 1016.405192] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.405192] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256287, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.415197] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256289, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.608677] env[61545]: DEBUG nova.network.neutron [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Updating instance_info_cache with network_info: [{"id": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "address": "fa:16:3e:5e:c5:a6", "network": {"id": "e0117de3-d943-40eb-b4c6-ca92a73e117e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1843972988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc37508ff05143799aadd79ca75e546f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9965f3c-24", "ovs_interfaceid": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.610461] env[61545]: DEBUG nova.compute.manager [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1016.610681] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.615212] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13631f41-5faf-45d8-9753-ee4f8786273c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.627413] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.628546] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa0c6a5e-c7eb-412b-975e-9b322ad374c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.652151] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256288, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.652151] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1016.652151] env[61545]: value = "task-4256290" [ 1016.652151] env[61545]: _type = "Task" [ 1016.652151] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.662495] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.701352] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8ab5eb-501f-44d7-ae46-310061ec08b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.712108] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f26c95f-3262-4ab0-9e34-927da1995b8f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.753488] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ace6314-dacf-498c-af33-29ce537cfc2b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.764735] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138cc72b-6ea2-4d77-9ba5-7fda5659c614 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.787881] env[61545]: DEBUG nova.compute.provider_tree [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.903042] env[61545]: DEBUG oslo_vmware.api [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256287, 'name': PowerOnVM_Task, 'duration_secs': 0.979629} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.903042] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.903042] env[61545]: INFO nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Took 14.26 seconds to spawn the instance on the hypervisor. 
[ 1016.903042] env[61545]: DEBUG nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.903042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b4f023-e0ff-4094-bd5c-50c0802840e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.922496] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256289, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.066271] env[61545]: DEBUG nova.compute.manager [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1017.115317] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Releasing lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.116429] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Instance network_info: |[{"id": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "address": "fa:16:3e:5e:c5:a6", "network": {"id": "e0117de3-d943-40eb-b4c6-ca92a73e117e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1843972988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc37508ff05143799aadd79ca75e546f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9965f3c-24", "ovs_interfaceid": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1017.116429] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:c5:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '750b5f9b-f78a-4650-9153-c5bb117e507c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9965f3c-2499-49d5-ba57-a093571cfc9e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.126783] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Creating folder: Project (bc37508ff05143799aadd79ca75e546f). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.128088] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ed608e2-5a21-46df-875b-97cdf21a21b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.145984] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256288, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.147994] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Created folder: Project (bc37508ff05143799aadd79ca75e546f) in parent group-v838542. [ 1017.148327] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Creating folder: Instances. Parent ref: group-v838765. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.148605] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bb6cda4-d78b-4cc8-8de9-b9b3f9d21ced {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.163234] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256290, 'name': PowerOffVM_Task, 'duration_secs': 0.262549} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.164837] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.165039] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.165330] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Created folder: Instances in parent group-v838765. [ 1017.165590] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.165731] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c011520e-8c36-4d4a-bb61-c23f62ed9e87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.167422] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.167710] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0abe96d5-b39c-41c3-ac2d-50eb4d3582f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.191905] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.191905] env[61545]: value = "task-4256294" [ 1017.191905] env[61545]: _type = "Task" [ 1017.191905] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.202754] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256294, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.274869] env[61545]: DEBUG nova.network.neutron [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.278901] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.279147] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.279318] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] d980f421-03b5-4b0e-b547-a33031356d55 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.279996] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af845a10-0c8c-4cf2-943e-51b034ee8783 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.287262] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1017.287262] env[61545]: value = "task-4256295" [ 1017.287262] env[61545]: _type = "Task" [ 1017.287262] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.292729] env[61545]: DEBUG nova.scheduler.client.report [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.303696] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.416084] env[61545]: DEBUG oslo_vmware.api [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256289, 'name': PowerOnVM_Task, 'duration_secs': 0.587962} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.420613] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.425630] env[61545]: DEBUG nova.compute.manager [None req-0181f248-3db9-4d4a-90c0-4d02617d2224 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.425630] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8f6ad6-a3b3-4656-8707-af11f563a159 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.429637] env[61545]: INFO nova.compute.manager [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Took 22.88 seconds to build instance. 
[ 1017.602955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.642743] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256288, 'name': Rename_Task, 'duration_secs': 1.269643} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.643120] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.643340] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfe9c70b-5df0-4c1e-acb0-2e355178a043 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.651126] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1017.651126] env[61545]: value = "task-4256296" [ 1017.651126] env[61545]: _type = "Task" [ 1017.651126] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.662811] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.701344] env[61545]: DEBUG nova.compute.manager [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Received event network-changed-c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1017.701554] env[61545]: DEBUG nova.compute.manager [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Refreshing instance network info cache due to event network-changed-c9965f3c-2499-49d5-ba57-a093571cfc9e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1017.701879] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] Acquiring lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.702044] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] Acquired lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.702220] env[61545]: DEBUG nova.network.neutron [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Refreshing network info cache for port c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.707778] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256294, 'name': CreateVM_Task, 'duration_secs': 0.472904} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.708234] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.709241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.709241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.709504] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.710140] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9492deb0-b7e9-4887-a385-325a5e059a3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.717023] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1017.717023] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521be107-22eb-3f96-f8f8-12b469928213" [ 1017.717023] env[61545]: _type = "Task" [ 1017.717023] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.729091] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521be107-22eb-3f96-f8f8-12b469928213, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.780951] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.799539] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.801295] env[61545]: DEBUG oslo_vmware.api [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308457} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.803227] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.039s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.803558] env[61545]: DEBUG nova.objects.instance [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lazy-loading 'resources' on Instance uuid 665db895-52ce-4e7c-9a78-86db5b695534 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.804974] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.805242] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.805532] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Instance destroyed {{(pid=61545) 
destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.805944] env[61545]: INFO nova.compute.manager [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1017.806186] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.807159] env[61545]: DEBUG nova.compute.manager [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1017.807415] env[61545]: DEBUG nova.network.neutron [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.834929] env[61545]: INFO nova.scheduler.client.report [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Deleted allocations for instance ced5bde7-07b9-4d07-8b13-49f6fb006eed [ 1017.931746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-55877c90-ce63-4126-a736-4d7bdbd0090a tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.412s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.162833] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.236337] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521be107-22eb-3f96-f8f8-12b469928213, 'name': SearchDatastore_Task, 'duration_secs': 0.024279} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.236973] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.237096] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.237318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.237441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.237681] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.237986] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcdc5d49-ba6b-4b7d-bfbc-379a12a1a694 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.251411] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.251411] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.251411] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434508aa-40ad-4325-8d00-6728659e0e43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.259224] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1018.259224] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52493b2d-8626-a8eb-db14-28fc7bccb4bb" [ 1018.259224] env[61545]: _type = "Task" [ 1018.259224] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.269109] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52493b2d-8626-a8eb-db14-28fc7bccb4bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.348264] env[61545]: DEBUG oslo_concurrency.lockutils [None req-96a0796e-0620-4e57-94ea-e558bb43dd79 tempest-SecurityGroupsTestJSON-958481474 tempest-SecurityGroupsTestJSON-958481474-project-member] Lock "ced5bde7-07b9-4d07-8b13-49f6fb006eed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.550s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.662661] env[61545]: DEBUG nova.network.neutron [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Updated VIF entry in instance network info cache for port c9965f3c-2499-49d5-ba57-a093571cfc9e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.663091] env[61545]: DEBUG nova.network.neutron [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Updating instance_info_cache with network_info: [{"id": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "address": "fa:16:3e:5e:c5:a6", "network": {"id": "e0117de3-d943-40eb-b4c6-ca92a73e117e", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1843972988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bc37508ff05143799aadd79ca75e546f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "750b5f9b-f78a-4650-9153-c5bb117e507c", "external-id": "nsx-vlan-transportzone-237", "segmentation_id": 237, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9965f3c-24", "ovs_interfaceid": "c9965f3c-2499-49d5-ba57-a093571cfc9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.667961] env[61545]: DEBUG oslo_vmware.api [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256296, 'name': PowerOnVM_Task, 'duration_secs': 0.861456} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.671303] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.671577] env[61545]: INFO nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Took 18.76 seconds to spawn the instance on the hypervisor. 
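The "Waiting for the task ... to complete", "progress is N%", and "completed successfully" lines above all come from the same poll-until-done loop that the vmwareapi driver runs around every vCenter task (SearchDatastore_Task, PowerOnVM_Task, and so on). Below is a minimal, self-contained sketch of that style of loop, assuming a hypothetical get_task_info callable and TaskInfo shape; it is an illustration of the pattern, not the oslo.vmware implementation.

    # Sketch of the poll-until-done pattern behind the "Waiting for the task",
    # "progress is N%", and "completed successfully" log lines.
    # TaskInfo and get_task_info are hypothetical stand-ins, not oslo.vmware APIs.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # "running", "success", or "error"
        progress: int         # 0-100
        error: str | None = None

    def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=60.0):
        """Poll task_ref until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            print(f"Task {task_ref} progress is {info.progress}%.")
            if time.monotonic() >= deadline:
                raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")
            time.sleep(interval)

The log's per-task entries (e.g. "progress is 0%" followed later by "completed successfully" with a duration) correspond to successive iterations of a loop like this one.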
[ 1018.671811] env[61545]: DEBUG nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.673247] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e66898-dd76-40a8-b8da-83848a33cfbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.729557] env[61545]: DEBUG nova.compute.manager [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Received event network-changed-7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1018.729841] env[61545]: DEBUG nova.compute.manager [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Refreshing instance network info cache due to event network-changed-7e18e278-e525-407c-90fa-107184503c1c. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1018.730195] env[61545]: DEBUG oslo_concurrency.lockutils [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] Acquiring lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.730436] env[61545]: DEBUG oslo_concurrency.lockutils [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] Acquired lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.730473] env[61545]: DEBUG nova.network.neutron [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Refreshing network info cache for port 7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.776964] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52493b2d-8626-a8eb-db14-28fc7bccb4bb, 'name': SearchDatastore_Task, 'duration_secs': 0.017415} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.782018] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc1c7349-e2ff-4454-bf42-ce3f0d1ca2f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.786771] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1018.786771] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5271a679-50ee-d43f-76d4-1cfb7f4d8940" [ 1018.786771] env[61545]: _type = "Task" [ 1018.786771] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.796652] env[61545]: DEBUG nova.network.neutron [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.806343] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5271a679-50ee-d43f-76d4-1cfb7f4d8940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.813017] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ee1f4f-4134-4fc0-a68c-06630739a9b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.820669] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd1a346-d646-45cf-943a-16159e4fa5ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.853535] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a30cba0-5f81-49bf-aa74-3db870122d71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.865567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6872c1a2-dfda-4c51-a819-131a6efbda84 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.884757] env[61545]: DEBUG nova.compute.provider_tree [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.172389] env[61545]: DEBUG oslo_concurrency.lockutils [req-7e60ef00-9062-4595-bcbd-01b6dde0da9a req-f9bdd0b6-2e81-4960-8f9b-beb9190bfed6 service nova] Releasing lock "refresh_cache-7301c541-664f-43ec-8a34-86f38cac22ab" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.195176] env[61545]: INFO nova.compute.manager [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Took 26.57 seconds to build instance. 
[ 1019.279135] env[61545]: INFO nova.compute.manager [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Unrescuing [ 1019.279498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.279682] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.279888] env[61545]: DEBUG nova.network.neutron [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.301644] env[61545]: INFO nova.compute.manager [-] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Took 1.49 seconds to deallocate network for instance. [ 1019.302122] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5271a679-50ee-d43f-76d4-1cfb7f4d8940, 'name': SearchDatastore_Task, 'duration_secs': 0.026624} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.304650] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.304962] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7301c541-664f-43ec-8a34-86f38cac22ab/7301c541-664f-43ec-8a34-86f38cac22ab.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1019.306092] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8422634a-61d5-45e7-a9cc-8270678d88c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.312135] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dee205d-04ba-43c8-9cf9-ef58f487440d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.335222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1019.340816] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1019.340816] env[61545]: value = "task-4256297" [ 1019.340816] env[61545]: _type = "Task" [ 1019.340816] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.388036] env[61545]: DEBUG nova.scheduler.client.report [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.695731] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9cd9a71d-9b0a-4ffe-be45-ab5f31963f20 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.084s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.757529] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.757977] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.762565] env[61545]: DEBUG nova.network.neutron [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updated VIF entry in instance network info cache for port 7e18e278-e525-407c-90fa-107184503c1c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.763356] env[61545]: DEBUG nova.network.neutron [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updating instance_info_cache with network_info: [{"id": "7e18e278-e525-407c-90fa-107184503c1c", "address": "fa:16:3e:c8:0e:6d", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e18e278-e5", "ovs_interfaceid": "7e18e278-e525-407c-90fa-107184503c1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.816176] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.844214] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.844596] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7ca42e1-34da-464b-b09f-13ea6eff05b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.856342] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.858097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.859087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.859498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.859753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.869096] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256297, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.871955] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1019.871955] env[61545]: value = "task-4256298" [ 1019.871955] env[61545]: _type = "Task" [ 1019.871955] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.873190] env[61545]: INFO nova.compute.manager [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Terminating instance [ 1019.890387] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1019.890647] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1019.894712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.901570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.182s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.902045] env[61545]: DEBUG nova.objects.instance [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lazy-loading 'resources' on Instance uuid fed2c050-74e7-48f1-8a19-7c58e26d2159 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.935167] env[61545]: INFO nova.scheduler.client.report [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted allocations for instance 665db895-52ce-4e7c-9a78-86db5b695534 [ 1019.942443] env[61545]: DEBUG nova.compute.manager [req-7e3cbdb7-5ebb-4b8a-93eb-6b3e5b51256c req-204e7525-2523-49b1-8ca0-7fd1eb8f46ee service nova] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Received event network-vif-deleted-eae1f8ca-5ff6-4f95-8ff5-9a4452601a17 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1020.269231] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.269648] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.269648] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.269868] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.270290] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.270359] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.270608] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1020.271053] env[61545]: DEBUG oslo_concurrency.lockutils [req-ba620256-52e5-4d63-a182-0601a1b12eef req-d1a1eeca-fc42-4cd3-8a7c-2c8afbf51cf0 service nova] Releasing lock "refresh_cache-855904d8-7eb3-405d-9236-ab4ba9b33940" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.271801] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.355528] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256297, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623856} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.355861] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7301c541-664f-43ec-8a34-86f38cac22ab/7301c541-664f-43ec-8a34-86f38cac22ab.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.356269] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.356609] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11ab729d-74b4-4dde-960d-f938fb0f044f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.364832] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1020.364832] env[61545]: value = "task-4256299" [ 1020.364832] env[61545]: _type = "Task" [ 1020.364832] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.375760] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.391591] env[61545]: DEBUG nova.compute.manager [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.391883] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.392853] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b70e974-ad1f-4cd4-b6e1-25f1458c3934 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.403934] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.404307] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.404554] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.404829] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.405089] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.405335] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.405620] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.405880] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.406184] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.406410] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.406719] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.413307] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.416694] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f91ab339-648b-4dff-95fe-d50b687d4fc6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.429225] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd9e4b9a-796f-4062-a800-8fc1b6670925 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.439376] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1020.439376] env[61545]: value = "task-4256300" [ 1020.439376] env[61545]: _type = "Task" [ 1020.439376] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.444659] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1020.444659] env[61545]: value = "task-4256301" [ 1020.444659] env[61545]: _type = "Task" [ 1020.444659] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.453152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d171ac7b-e44f-45c3-a4d1-983d266ac146 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "665db895-52ce-4e7c-9a78-86db5b695534" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.903s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.459397] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.466643] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.747141] env[61545]: DEBUG nova.network.neutron [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.775158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.879704] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256299, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.077559} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.881094] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.881898] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b39a25c-3d64-4abc-8d65-bef6aa415a55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.888214] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f337fec-19c8-4fc1-acf6-388664542bad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.907532] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2339eb38-ac1c-4a61-a416-72de01c3eec6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.919758] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 7301c541-664f-43ec-8a34-86f38cac22ab/7301c541-664f-43ec-8a34-86f38cac22ab.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.920127] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7a40ad-ef50-4328-a334-522711bdeb83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.973160] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc7c313-1325-48ab-805b-b70b6ca4c07d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.976363] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1020.976363] env[61545]: value = "task-4256302" [ 1020.976363] env[61545]: _type = "Task" [ 1020.976363] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.989461] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256301, 'name': ReconfigVM_Task, 'duration_secs': 0.270584} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.989714] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256300, 'name': PowerOffVM_Task, 'duration_secs': 0.220606} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.990583] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1020.994349] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.994538] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.995949] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca4c13a-da89-454e-938d-cf23dafb731a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.004910] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cc88e84-b5b5-4572-b868-ab75971cb558 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.007460] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.021707] env[61545]: DEBUG nova.compute.provider_tree [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.249908] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.250476] env[61545]: DEBUG nova.objects.instance [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'flavor' on Instance uuid 1e5be92c-d727-4515-9e16-85ade2719455 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.311258] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.311599] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.311669] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] 5a610b1c-df03-4ca6-83ff-ba651edcc8d0 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.316036] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8c39863-87e1-4c6e-9e95-d894530d41e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.320225] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1021.320225] env[61545]: value = "task-4256304" [ 1021.320225] env[61545]: _type = "Task" [ 1021.320225] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.334433] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256304, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.488298] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.503854] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.504137] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.504340] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.504535] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.504682] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.504830] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.505275] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.505554] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.505799] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.506055] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.506659] env[61545]: DEBUG nova.virt.hardware [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.512362] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfiguring VM instance instance-00000036 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1021.512990] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ab5d53d-5bb1-4d73-bdcc-951edd0a7931 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.528858] env[61545]: DEBUG nova.scheduler.client.report [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.548504] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1021.548504] env[61545]: value = "task-4256305" [ 1021.548504] env[61545]: _type = "Task" [ 1021.548504] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.562909] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256305, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.766374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5887cd7-990b-48a7-b8ea-03ae1d09112e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.797242] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.797751] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71275755-5bfd-497c-8d48-0954568a3aaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.806121] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1021.806121] env[61545]: value = "task-4256306" [ 1021.806121] env[61545]: _type = "Task" [ 1021.806121] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.822276] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.837432] env[61545]: DEBUG oslo_vmware.api [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342006} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.837888] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.838098] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.838310] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.838492] env[61545]: INFO nova.compute.manager [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Took 1.45 seconds to destroy the instance on the hypervisor. 
[ 1021.838865] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.839105] env[61545]: DEBUG nova.compute.manager [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.839205] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.988056] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256302, 'name': ReconfigVM_Task, 'duration_secs': 0.747426} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.988286] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 7301c541-664f-43ec-8a34-86f38cac22ab/7301c541-664f-43ec-8a34-86f38cac22ab.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.988952] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b0a4607-f52f-4f25-a210-488e429ae91c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.995995] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1021.995995] env[61545]: value = "task-4256307" [ 1021.995995] env[61545]: _type = "Task" [ 1021.995995] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.007568] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256307, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.040023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.042918] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.864s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.043110] env[61545]: DEBUG nova.objects.instance [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1022.064757] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256305, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.068333] env[61545]: INFO nova.scheduler.client.report [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Deleted allocations for instance fed2c050-74e7-48f1-8a19-7c58e26d2159 [ 1022.317853] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256306, 'name': PowerOffVM_Task, 'duration_secs': 0.266107} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.317853] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.322820] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfiguring VM instance instance-00000041 to detach disk 2002 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1022.323157] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b699594a-0493-4164-b97c-61db82ae902b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.349113] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1022.349113] env[61545]: value = "task-4256308" [ 1022.349113] env[61545]: _type = "Task" [ 1022.349113] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.358277] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256308, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.507018] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256307, 'name': Rename_Task, 'duration_secs': 0.179325} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.507334] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.507805] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aa9c9f7-711c-4238-b05b-ab416843010f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.515212] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1022.515212] env[61545]: value = "task-4256309" [ 1022.515212] env[61545]: _type = "Task" [ 1022.515212] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.524930] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.570597] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256305, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.580740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9373ebb0-c149-42eb-b01d-a18e50d048e6 tempest-ServerAddressesTestJSON-1186249743 tempest-ServerAddressesTestJSON-1186249743-project-member] Lock "fed2c050-74e7-48f1-8a19-7c58e26d2159" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.674s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.864023] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256308, 'name': ReconfigVM_Task, 'duration_secs': 0.295609} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.864023] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfigured VM instance instance-00000041 to detach disk 2002 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1022.864023] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.864023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d82407e2-7349-4357-89bb-e5acec4147ee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.871459] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1022.871459] env[61545]: value = "task-4256310" [ 1022.871459] env[61545]: _type = "Task" [ 1022.871459] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.881378] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.926903] env[61545]: DEBUG nova.compute.manager [req-b98951a6-ca59-4c41-a93c-669963cd111c req-92e5c08e-3b55-4d8c-95b2-d99485c680e3 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Received event network-vif-deleted-ac373ca4-eda8-462a-a658-52fa15cb8b0b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1022.926903] env[61545]: INFO nova.compute.manager [req-b98951a6-ca59-4c41-a93c-669963cd111c req-92e5c08e-3b55-4d8c-95b2-d99485c680e3 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Neutron deleted interface ac373ca4-eda8-462a-a658-52fa15cb8b0b; detaching it from the instance and deleting it from the info cache [ 1022.926903] env[61545]: DEBUG nova.network.neutron [req-b98951a6-ca59-4c41-a93c-669963cd111c req-92e5c08e-3b55-4d8c-95b2-d99485c680e3 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.027062] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256309, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.063179] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d980e1ef-13bf-4cae-a272-41ceedc35f01 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.069259] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256305, 'name': ReconfigVM_Task, 'duration_secs': 1.312102} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.069259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.565s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.069259] env[61545]: DEBUG nova.objects.instance [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lazy-loading 'resources' on Instance uuid 73df6df0-ead6-49cd-8b0a-5e95acfc7e15 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.073288] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfigured VM instance instance-00000036 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1023.074363] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5ae1c4-2884-4246-83d2-f3cbe4ae50be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.101239] env[61545]: DEBUG nova.network.neutron [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.113321] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.115870] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19578c36-27fc-41de-90c9-22b71f62ed37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.138609] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1023.138609] env[61545]: value = "task-4256311" [ 1023.138609] env[61545]: _type = "Task" [ 1023.138609] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.150479] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256311, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.274998] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "b33e29cc-fe26-429a-8799-8d790667cc1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.275265] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.388420] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256310, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.432623] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30b67ab5-dd66-4d91-84c1-8afdf6f0580f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.442955] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7b1440-11c0-408a-8642-096f9eb48590 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.483240] env[61545]: DEBUG nova.compute.manager [req-b98951a6-ca59-4c41-a93c-669963cd111c req-92e5c08e-3b55-4d8c-95b2-d99485c680e3 service nova] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Detach interface failed, port_id=ac373ca4-eda8-462a-a658-52fa15cb8b0b, reason: Instance 5a610b1c-df03-4ca6-83ff-ba651edcc8d0 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1023.528021] env[61545]: DEBUG oslo_vmware.api [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256309, 'name': PowerOnVM_Task, 'duration_secs': 0.598554} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.528021] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.528021] env[61545]: INFO nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 10.52 seconds to spawn the instance on the hypervisor. 
[ 1023.528021] env[61545]: DEBUG nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.528296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49822f90-ef85-4ed6-acb7-19238a3d9ad1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.615207] env[61545]: INFO nova.compute.manager [-] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Took 1.78 seconds to deallocate network for instance. [ 1023.652424] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256311, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.777784] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.887193] env[61545]: DEBUG oslo_vmware.api [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256310, 'name': PowerOnVM_Task, 'duration_secs': 0.520359} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.887727] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.887855] env[61545]: DEBUG nova.compute.manager [None req-d6bd6987-394e-4c51-8f6a-fb1d18c0514c tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.888644] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30131e2-d52d-462d-b7f5-6399aeb4ad16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.054162] env[61545]: INFO nova.compute.manager [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 21.54 seconds to build instance. 
[ 1024.066525] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56465966-9533-41f0-b1bc-2ada653e1378 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.079113] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9938c8-1d8b-4724-b679-999bfcde965e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.117825] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6043005-d324-48c9-b071-644777a25428 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.127555] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd97fc0-021f-4c85-8d15-d25b1fdad524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.133526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.147783] env[61545]: DEBUG nova.compute.provider_tree [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.163282] env[61545]: DEBUG oslo_vmware.api [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256311, 'name': ReconfigVM_Task, 'duration_secs': 0.649916} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.164432] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67/1be4da80-c9ee-424e-b4e3-bdd22eb0cd67.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.164530] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.306673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.556666] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e7de878-1341-4c5b-9f97-72f669e44b2d tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.066s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.658384] env[61545]: DEBUG nova.scheduler.client.report [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.671333] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd747277-fcbf-4f50-8bfe-17f053e497c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.694249] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de7e2e9-062b-4529-b8d2-bc6e4ca5bfee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.716055] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating 
instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.867233] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1024.868750] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba58154-3108-4f54-87f0-b2bab72eabe0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.878459] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1024.878750] env[61545]: ERROR oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk due to incomplete transfer. [ 1024.879900] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e35a616d-2a00-4b96-b9d4-7b91a1a3583d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.890572] env[61545]: DEBUG oslo_vmware.rw_handles [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fe831b-51e9-8386-2cf2-3b8b30c3305d/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1024.891041] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Uploaded image 5bc7010a-ee45-48d1-87c9-e47216be1200 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1024.893465] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1024.894212] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-908066ed-158b-438f-a003-171340ebef87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.903027] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1024.903027] env[61545]: value = "task-4256312" [ 1024.903027] env[61545]: _type = "Task" [ 1024.903027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.914301] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256312, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.163985] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.168640] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.818s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.172028] env[61545]: INFO nova.compute.claims [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.203385] env[61545]: INFO nova.scheduler.client.report [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Deleted allocations for instance 73df6df0-ead6-49cd-8b0a-5e95acfc7e15 [ 1025.406673] env[61545]: DEBUG nova.network.neutron [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Port d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1025.422206] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256312, 'name': Destroy_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.716394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a19db587-5482-4247-9c81-80b0e00a9143 tempest-ServersWithSpecificFlavorTestJSON-1598309329 tempest-ServersWithSpecificFlavorTestJSON-1598309329-project-member] Lock "73df6df0-ead6-49cd-8b0a-5e95acfc7e15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.886s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.919745] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256312, 'name': Destroy_Task, 'duration_secs': 0.710349} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.920291] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Destroyed the VM [ 1025.920528] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1025.920813] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d89c8c7a-1e9e-40d2-b0a8-a0ee3be48342 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.930311] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1025.930311] env[61545]: value = "task-4256313" [ 1025.930311] env[61545]: _type = "Task" [ 1025.930311] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.943574] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256313, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.985821] env[61545]: DEBUG nova.compute.manager [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1025.986075] env[61545]: DEBUG nova.compute.manager [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing instance network info cache due to event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1025.986269] env[61545]: DEBUG oslo_concurrency.lockutils [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.986394] env[61545]: DEBUG oslo_concurrency.lockutils [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.986554] env[61545]: DEBUG nova.network.neutron [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.433113] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.433371] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.433560] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.455824] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256313, 'name': RemoveSnapshot_Task, 'duration_secs': 0.419321} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.459327] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1026.459649] env[61545]: DEBUG nova.compute.manager [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.461799] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7308d352-1063-41a0-910c-0c1d7372c73d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.579627] env[61545]: DEBUG nova.compute.manager [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.580990] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa308e2-2b2f-40dc-bae2-21a86eea5c5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.661917] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93505df-2554-4eea-acd7-8a9e52342903 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.672443] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf3ca1f-1ea8-4c55-b328-acd01ccae1ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.708211] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa3b249-3b42-44b5-a2c5-c5c17f632317 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.720987] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9475d1c3-8625-43b7-b9f8-6b554015397d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.739593] env[61545]: DEBUG nova.compute.provider_tree [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.977166] env[61545]: DEBUG nova.network.neutron [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updated VIF entry in instance network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.977535] env[61545]: DEBUG nova.network.neutron [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.979802] env[61545]: INFO nova.compute.manager [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Shelve offloading [ 1027.103228] env[61545]: INFO nova.compute.manager [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] instance snapshotting [ 1027.105990] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec80690-0187-404e-b640-7815c7912910 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.126792] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c863a419-db3e-4793-9d8d-49b7e89ddfb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.243632] env[61545]: DEBUG nova.scheduler.client.report [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.482282] 
env[61545]: DEBUG oslo_concurrency.lockutils [req-03c2b7e9-d70d-49c1-a8b5-10601c7417cc req-4ee62206-f36c-401b-b186-d4468bb03a7b service nova] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.487960] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.487960] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2fa5b8f-3030-4cc1-b648-88fb984c6435 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.498560] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1027.498560] env[61545]: value = "task-4256314" [ 1027.498560] env[61545]: _type = "Task" [ 1027.498560] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.513189] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.513479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.513678] env[61545]: DEBUG nova.network.neutron [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.514995] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1027.518778] env[61545]: DEBUG nova.compute.manager [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.518778] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f9e201-290f-48d1-8865-38e410b59f22 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.527553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.527553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.528120] env[61545]: DEBUG nova.network.neutron [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1027.638707] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1027.639124] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c091110d-4dcb-4a06-841e-d730bce9ab31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.649668] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1027.649668] env[61545]: value = "task-4256315" [ 1027.649668] env[61545]: _type = "Task" [ 1027.649668] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.661153] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256315, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.749926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.750449] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.753295] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.539s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.755527] env[61545]: INFO nova.compute.claims [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.162045] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256315, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.234639] env[61545]: DEBUG nova.compute.manager [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1028.236156] env[61545]: DEBUG nova.compute.manager [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing instance network info cache due to event network-changed-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1028.236156] env[61545]: DEBUG oslo_concurrency.lockutils [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] Acquiring lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.236156] env[61545]: DEBUG oslo_concurrency.lockutils [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] Acquired lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.236156] env[61545]: DEBUG nova.network.neutron [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Refreshing network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.262539] env[61545]: DEBUG nova.compute.utils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.267298] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.267513] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.421535] env[61545]: DEBUG nova.policy [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.664166] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256315, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.746706] env[61545]: DEBUG nova.network.neutron [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.771991] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.795361] env[61545]: DEBUG nova.network.neutron [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.177097] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256315, 'name': CreateSnapshot_Task, 'duration_secs': 1.107607} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.177097] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1029.177868] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adea298-ffec-4b29-be33-e4b46b284a6e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.253162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.299166] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.349343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143e4c0d-47ca-47ef-b30c-7a5c5f5a481e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.358940] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda76697-fc0d-45f8-bb19-49b36e282f7c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.412297] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7265f533-7e57-4ea8-b0b8-6bd8c5152bb1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.428889] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741caca9-68d3-48ec-96c2-8559c2611d10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.450350] env[61545]: DEBUG nova.compute.provider_tree [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.547373] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Successfully created port: 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.597670] env[61545]: DEBUG nova.network.neutron [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] [instance: 
1e5be92c-d727-4515-9e16-85ade2719455] Updated VIF entry in instance network info cache for port 5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.597670] env[61545]: DEBUG nova.network.neutron [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [{"id": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "address": "fa:16:3e:38:0c:1c", "network": {"id": "7eae0a85-4d77-4534-850c-f8fdd2db95fa", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-659537870-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c33b968c2bbc431686e949fdf795fa76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fdf9f41-2f", "ovs_interfaceid": "5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.713505] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1029.713978] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a350ea84-3aab-40ae-82a6-246d98105105 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.726817] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1029.726817] env[61545]: value = "task-4256316" [ 1029.726817] env[61545]: _type = "Task" [ 1029.726817] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.742058] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256316, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.786952] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.791765] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23488ab-6a34-4274-84e2-851ea1e0a5fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.822936] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0045813a-b034-4d69-a9d4-3c3e132dbb52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.837190] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a 
tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.846418] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.846737] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.846737] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.847327] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.847327] env[61545]: DEBUG nova.virt.hardware [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.848165] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710b8c1-011d-49a2-ba59-9ab29f8fdb5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.856789] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cb06d6-b8b4-4410-ad8c-32ebe1ef8c76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.955635] env[61545]: DEBUG nova.scheduler.client.report [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.103709] env[61545]: DEBUG oslo_concurrency.lockutils [req-62187a45-7222-49cb-85cd-92cdbce00699 req-f7e580b0-01fe-45ab-b73c-52efbf4f7c67 service nova] Releasing lock "refresh_cache-1e5be92c-d727-4515-9e16-85ade2719455" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.241377] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256316, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.354620] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-23ed5c29-97a1-43ce-a006-169c2176a54b tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance '1be4da80-c9ee-424e-b4e3-bdd22eb0cd67' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1030.460018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.460757] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1030.467989] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.862s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.743114] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256316, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.763270] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.763270] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a074d8-3095-40d3-9b6a-a5a7d3bd1532 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.773174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.774266] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12fba20d-5970-4d1d-9eaf-c13854ac1458 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.863784] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.864182] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.864261] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.864477] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69a3f3fd-db47-467e-ae28-a003501231a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.872400] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1030.872400] env[61545]: value = "task-4256318" [ 1030.872400] env[61545]: _type = "Task" [ 1030.872400] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.884558] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256318, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.970824] env[61545]: DEBUG nova.compute.utils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.975131] env[61545]: INFO nova.compute.claims [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.984589] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1030.984589] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.051740] env[61545]: DEBUG nova.compute.manager [req-c904f9a8-c456-44d0-b121-dc556cea63ee req-2de32079-7f49-4daa-b933-ce3416572591 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-vif-unplugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1031.051979] env[61545]: DEBUG oslo_concurrency.lockutils [req-c904f9a8-c456-44d0-b121-dc556cea63ee req-2de32079-7f49-4daa-b933-ce3416572591 service nova] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.053330] env[61545]: DEBUG oslo_concurrency.lockutils [req-c904f9a8-c456-44d0-b121-dc556cea63ee req-2de32079-7f49-4daa-b933-ce3416572591 service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.053523] env[61545]: DEBUG oslo_concurrency.lockutils [req-c904f9a8-c456-44d0-b121-dc556cea63ee req-2de32079-7f49-4daa-b933-ce3416572591 service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.053736] env[61545]: DEBUG nova.compute.manager [req-c904f9a8-c456-44d0-b121-dc556cea63ee req-2de32079-7f49-4daa-b933-ce3416572591 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] No waiting events found dispatching network-vif-unplugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.053877] env[61545]: WARNING nova.compute.manager [req-c904f9a8-c456-44d0-b121-dc556cea63ee 
req-2de32079-7f49-4daa-b933-ce3416572591 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received unexpected event network-vif-unplugged-7e7e6bd8-fac2-4516-af29-a249216acca6 for instance with vm_state shelved and task_state shelving_offloading. [ 1031.074021] env[61545]: DEBUG nova.policy [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.251368] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256316, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.358314] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.358718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.387227] env[61545]: DEBUG oslo_vmware.api [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211636} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.387227] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.387494] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.387765] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.413889] env[61545]: INFO nova.scheduler.client.report [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted allocations for instance 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 [ 1031.485216] env[61545]: INFO nova.compute.resource_tracker [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating resource usage from migration 4a8c88d2-213c-4f97-884a-8726cbc598fe [ 1031.489073] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1031.742215] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256316, 'name': CloneVM_Task, 'duration_secs': 1.801045} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.747143] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Created linked-clone VM from snapshot [ 1031.748525] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6a5147-f4ab-4e4d-88d3-15d2bda977b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.759083] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Uploading image 2db3fa56-39d4-433c-8da2-93da32a05392 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1031.786763] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Successfully created port: ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.804263] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1031.804263] env[61545]: value = "vm-838769" [ 1031.804263] env[61545]: _type = "VirtualMachine" [ 1031.804263] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1031.804598] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-dd5b3a74-3e1d-495a-a16a-1c2feef82ec9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.814832] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lease: (returnval){ [ 1031.814832] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cfeca-8f7e-f3ce-9ad4-eea118bbb09e" [ 1031.814832] env[61545]: _type = "HttpNfcLease" [ 1031.814832] env[61545]: } obtained for exporting VM: (result){ [ 1031.814832] env[61545]: value = "vm-838769" [ 1031.814832] env[61545]: _type = "VirtualMachine" [ 1031.814832] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1031.815121] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the lease: (returnval){ [ 1031.815121] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cfeca-8f7e-f3ce-9ad4-eea118bbb09e" [ 1031.815121] env[61545]: _type = "HttpNfcLease" [ 1031.815121] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1031.822766] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1031.822766] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cfeca-8f7e-f3ce-9ad4-eea118bbb09e" [ 1031.822766] env[61545]: _type = "HttpNfcLease" [ 1031.822766] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1031.863032] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1031.923344] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.950796] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753f3e25-ffa9-47a4-94b2-af572eddd45c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.961704] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b068e77-3f33-46be-bef5-39e1a9e4dd8e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.005048] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a8b56a-5957-4a33-84cf-b4ae9da34ed4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.011894] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fd4247-f353-4076-84a8-3d659cad170f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.030150] env[61545]: DEBUG nova.compute.provider_tree [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.325989] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1032.325989] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cfeca-8f7e-f3ce-9ad4-eea118bbb09e" [ 1032.325989] env[61545]: _type = "HttpNfcLease" [ 1032.325989] env[61545]: } is ready. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1032.326339] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1032.326339] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528cfeca-8f7e-f3ce-9ad4-eea118bbb09e" [ 1032.326339] env[61545]: _type = "HttpNfcLease" [ 1032.326339] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1032.327585] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7968513-02eb-4d35-a0c6-cba6037e3199 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.337829] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1032.338599] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1032.406644] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.406859] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.407034] env[61545]: DEBUG nova.compute.manager [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Going to confirm migration 3 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1032.434704] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.504102] env[61545]: DEBUG 
nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1032.511552] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-59c96c15-7366-4b8d-b6fd-f6ec20cc142f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.533970] env[61545]: DEBUG nova.scheduler.client.report [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.542783] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.543032] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.543853] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.543853] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.543853] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.543853] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.543853] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.544120] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1032.544331] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.544636] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.546053] env[61545]: DEBUG nova.virt.hardware [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.548189] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4b4cb5-d5db-416b-a024-6803b6e72564 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.559643] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398d2217-83bc-4636-aa59-a37ab3ca6106 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.625555] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Successfully updated port: 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.947748] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.948690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.948690] env[61545]: DEBUG nova.network.neutron [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.948690] env[61545]: DEBUG nova.objects.instance [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'info_cache' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.040097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.575s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.040302] env[61545]: INFO nova.compute.manager [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Migrating [ 1033.058973] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.243s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.064465] env[61545]: DEBUG nova.objects.instance [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lazy-loading 'resources' on Instance uuid d980f421-03b5-4b0e-b547-a33031356d55 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.101331] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1033.102474] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing instance network info cache due to event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1033.102641] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.102782] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.103056] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.128882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.128882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.128882] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.584413] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.584413] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.584413] env[61545]: DEBUG nova.network.neutron [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.732976] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] 
[instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.025376] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40a3d7d-7109-4350-a946-05b6196acb61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.034574] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2836facc-e00c-4f2d-9f20-bd562766a3d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.081446] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e9896d-f605-4556-8f8c-05036af512f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.101022] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6fcdac-64e5-463f-b4c5-fdfcec82d285 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.116625] env[61545]: DEBUG nova.compute.provider_tree [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.272925] env[61545]: DEBUG nova.network.neutron [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updating instance_info_cache with network_info: [{"id": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "address": "fa:16:3e:28:6c:a3", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b10a96e-65", "ovs_interfaceid": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.382139] env[61545]: DEBUG nova.network.neutron [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": 
"a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.394259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.492209] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updated VIF entry in instance network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.492577] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.623024] env[61545]: DEBUG nova.scheduler.client.report [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.686042] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Successfully updated port: ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.711376] env[61545]: DEBUG nova.network.neutron [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.775949] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.776313] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance network_info: |[{"id": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "address": "fa:16:3e:28:6c:a3", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b10a96e-65", "ovs_interfaceid": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1034.776738] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:6c:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b10a96e-6545-4419-9cd0-afa5ce3e2cc2', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.784613] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.784850] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.785099] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fa6bdcd-c118-40d6-ba60-0c4a7ad5d385 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.885424] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.995698] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.996022] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received event network-vif-plugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1034.996254] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.996510] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.996722] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.996921] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] No waiting events found dispatching network-vif-plugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.997142] env[61545]: WARNING nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received unexpected event network-vif-plugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 for instance 
with vm_state building and task_state spawning. [ 1034.997358] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received event network-changed-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1034.997537] env[61545]: DEBUG nova.compute.manager [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Refreshing instance network info cache due to event network-changed-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1034.997731] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Acquiring lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.998145] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Acquired lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.998145] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Refreshing network info cache for port 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.066088] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.066088] env[61545]: value = "task-4256320" [ 1035.066088] env[61545]: _type = "Task" [ 1035.066088] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.076553] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256320, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.131031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.132311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.357s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.132766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.133141] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1035.134767] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.134767] env[61545]: DEBUG nova.objects.instance [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lazy-loading 'resources' on Instance uuid 5a610b1c-df03-4ca6-83ff-ba651edcc8d0 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.136681] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0e7d20-42cc-4939-8f75-50cd77bdb0cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.149623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89daa43b-9d6e-4550-911d-05b7f5417b7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.157277] env[61545]: INFO nova.scheduler.client.report [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted allocations for instance d980f421-03b5-4b0e-b547-a33031356d55 [ 1035.173879] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df6f1db-bc05-4947-bf8f-54c834cacd7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.184161] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6d2023e7-e2b8-4a00-aa37-30e1787117ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.188038] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.188085] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.188670] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.223851] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.224192] env[61545]: DEBUG nova.objects.instance [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'migration_context' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.225904] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178090MB free_disk=244GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1035.226196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.286187] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.556621] env[61545]: DEBUG nova.compute.manager [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-vif-plugged-ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1035.556906] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.557087] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.557256] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.557484] env[61545]: DEBUG nova.compute.manager [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] No waiting events found dispatching network-vif-plugged-ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1035.557593] env[61545]: WARNING nova.compute.manager [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received unexpected event network-vif-plugged-ce640f58-ba75-4bd9-8c39-40145ff6ac4e for instance with vm_state building and task_state spawning. [ 1035.557746] env[61545]: DEBUG nova.compute.manager [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-changed-ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1035.557986] env[61545]: DEBUG nova.compute.manager [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing instance network info cache due to event network-changed-ce640f58-ba75-4bd9-8c39-40145ff6ac4e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1035.558177] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.592895] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256320, 'name': CreateVM_Task, 'duration_secs': 0.389955} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.595526] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.595526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.595526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.595526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1035.595526] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b266ca38-9580-486f-bbfc-e6232341dd4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.603225] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1035.603225] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1ebb6-6d72-fe78-ab80-2ddfb16462e7" [ 1035.603225] env[61545]: _type = "Task" [ 1035.603225] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.617708] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1ebb6-6d72-fe78-ab80-2ddfb16462e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.680380] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b83887f-f276-46bc-ab24-6e332c1328e7 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "d980f421-03b5-4b0e-b547-a33031356d55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.584s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.730121] env[61545]: DEBUG nova.objects.base [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Object Instance<1be4da80-c9ee-424e-b4e3-bdd22eb0cd67> lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1035.731090] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8dcc22-9e7d-465f-afb4-466b8f307689 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.754667] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-635d8472-70b9-40ad-996c-842177b099e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.761120] env[61545]: DEBUG oslo_vmware.api [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1035.761120] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52930262-590b-7ec9-d671-d97662b4cfc9" [ 1035.761120] env[61545]: _type = "Task" [ 1035.761120] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.770693] env[61545]: DEBUG oslo_vmware.api [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52930262-590b-7ec9-d671-d97662b4cfc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.051565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fdd7ac-a444-400f-a79c-db23ab8ddd7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.064519] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c674d77-e8ca-4dd4-8200-2162cfd90a30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.108945] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cb4831-4ed6-49b6-b457-3a8c5f23eadf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.113198] env[61545]: DEBUG nova.network.neutron [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.126963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1187fbf-1e12-421e-b9a5-eef94b67a9cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.133443] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1ebb6-6d72-fe78-ab80-2ddfb16462e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015116} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.133834] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.134114] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.134597] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.134655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.134840] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.135821] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f19a51cb-8387-4b05-857e-8dbeb84bae6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.149129] env[61545]: DEBUG nova.compute.provider_tree [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.159865] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.160033] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.160869] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1661213e-e4fd-42e2-896e-26c01ccb4e26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.167678] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1036.167678] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52712cf0-90b2-8a31-761b-5958f9c72397" [ 1036.167678] env[61545]: _type = "Task" [ 1036.167678] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.177558] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52712cf0-90b2-8a31-761b-5958f9c72397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.252403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.253442] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.273316] env[61545]: DEBUG oslo_vmware.api [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52930262-590b-7ec9-d671-d97662b4cfc9, 'name': SearchDatastore_Task, 'duration_secs': 0.009067} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.273637] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.333624] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updated VIF entry in instance network info cache for port 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.334059] env[61545]: DEBUG nova.network.neutron [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updating instance_info_cache with network_info: [{"id": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "address": "fa:16:3e:28:6c:a3", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b10a96e-65", "ovs_interfaceid": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.406748] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e669ec2-1856-462d-90b0-42d2d83c2582 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.426819] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1036.620097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.620097] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Instance network_info: |[{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.620097] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.621139] env[61545]: DEBUG nova.network.neutron [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing network info cache for port ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.622559] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:1e:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce640f58-ba75-4bd9-8c39-40145ff6ac4e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.634103] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating folder: Project (845539fcaa744e59b6eb695b8a257de4). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1036.638075] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b236663-f9b8-421f-955f-9bee22961622 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.659272] env[61545]: DEBUG nova.scheduler.client.report [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.663770] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created folder: Project (845539fcaa744e59b6eb695b8a257de4) in parent group-v838542. [ 1036.663770] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating folder: Instances. Parent ref: group-v838771. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1036.664099] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-325ef2c8-38d9-4e98-9ba5-645b927cce4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.678819] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created folder: Instances in parent group-v838771. [ 1036.678819] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.679103] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.679180] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b423fce5-418b-43b4-ad9b-765889d7bdb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.703896] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52712cf0-90b2-8a31-761b-5958f9c72397, 'name': SearchDatastore_Task, 'duration_secs': 0.012011} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.709910] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0803562e-3d72-40e7-be62-4e1f2ba6899a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.717319] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.717319] env[61545]: value = "task-4256323" [ 1036.717319] env[61545]: _type = "Task" [ 1036.717319] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.724282] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1036.724282] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527235d1-beb1-b4b3-74a4-12e21c4cd8ca" [ 1036.724282] env[61545]: _type = "Task" [ 1036.724282] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.734852] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256323, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.741413] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527235d1-beb1-b4b3-74a4-12e21c4cd8ca, 'name': SearchDatastore_Task, 'duration_secs': 0.014754} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.741629] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.741919] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8b9c45a7-3574-47c9-b46b-34eed554fdc8/8b9c45a7-3574-47c9-b46b-34eed554fdc8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.742191] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de37dc15-6f89-4fe6-b28d-208389443d3c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.752023] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1036.752023] env[61545]: value = "task-4256324" [ 1036.752023] env[61545]: _type = "Task" [ 1036.752023] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.757592] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1036.765621] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.838122] env[61545]: DEBUG oslo_concurrency.lockutils [req-e706239a-88ba-4c7d-9e93-bb0bd4bb4f68 req-0f5e2145-b3c4-4df9-9d21-3a98bc4ca8bd service nova] Releasing lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.933251] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.933590] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34e25189-2d61-45d7-9cf0-7683352c68f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.941404] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1036.941404] env[61545]: value = "task-4256325" [ 1036.941404] env[61545]: _type = "Task" [ 1036.941404] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.951565] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256325, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.017899] env[61545]: DEBUG nova.network.neutron [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updated VIF entry in instance network info cache for port ce640f58-ba75-4bd9-8c39-40145ff6ac4e. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.018542] env[61545]: DEBUG nova.network.neutron [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.164571] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.165028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.165309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.165571] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.165967] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff 
tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.168899] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.172136] env[61545]: INFO nova.compute.manager [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Terminating instance [ 1037.174496] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.868s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.176539] env[61545]: INFO nova.compute.claims [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.201230] env[61545]: INFO nova.scheduler.client.report [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted allocations for instance 5a610b1c-df03-4ca6-83ff-ba651edcc8d0 [ 1037.230183] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256323, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.266504] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256324, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.291575] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.452502] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256325, 'name': PowerOffVM_Task, 'duration_secs': 0.349739} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.452641] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.452840] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1037.521573] env[61545]: DEBUG oslo_concurrency.lockutils [req-ac70f9a2-3382-4cde-a61a-e7825a185883 req-2732ee11-3aa0-43a2-bedf-6836f5ecea78 service nova] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.587244] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.587529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.588021] env[61545]: INFO nova.compute.manager [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Shelving [ 1037.685020] env[61545]: DEBUG nova.compute.manager [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1037.686128] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.687131] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ea824e-df50-45a8-acc9-d3569daf8f7c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.696377] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.697235] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-059fd73a-44c8-4f56-9d47-a894f62c5618 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.710910] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1037.710910] env[61545]: value = "task-4256326" [ 1037.710910] env[61545]: _type = "Task" [ 1037.710910] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.711466] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1fcf6f83-04ae-43d9-b851-798833d2ca22 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "5a610b1c-df03-4ca6-83ff-ba651edcc8d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.855s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.727472] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256326, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.734688] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256323, 'name': CreateVM_Task, 'duration_secs': 0.591109} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.735648] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.736491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.736668] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.737061] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.737618] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26b3f1b9-a06b-4845-894b-4ddb5bdea48a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.745308] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1037.745308] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527528b1-58eb-c559-78a3-f9fe12803f1a" [ 1037.745308] env[61545]: _type = "Task" [ 1037.745308] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.755269] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527528b1-58eb-c559-78a3-f9fe12803f1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.764840] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616503} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.765141] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 8b9c45a7-3574-47c9-b46b-34eed554fdc8/8b9c45a7-3574-47c9-b46b-34eed554fdc8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.765363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.765640] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1187772-db69-40bd-875c-fcf411bdcb48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.773796] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1037.773796] env[61545]: value = "task-4256327" [ 1037.773796] env[61545]: _type = "Task" [ 1037.773796] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.783911] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256327, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.960260] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.960614] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.960775] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.960962] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.961114] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.961262] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.961464] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.961702] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.961849] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible 
topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.962052] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.962204] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.968154] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db5e8abc-e38c-401f-b49e-31bf079841e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.986106] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1037.986106] env[61545]: value = "task-4256328" [ 1037.986106] env[61545]: _type = "Task" [ 1037.986106] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.996882] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256328, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.230804] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256326, 'name': PowerOffVM_Task, 'duration_secs': 0.219451} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.231176] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.231421] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1038.232307] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e303f24b-e9e3-4d43-9f9a-cf6d55becb9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.257217] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527528b1-58eb-c559-78a3-f9fe12803f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.023419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.260346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.260593] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.260829] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.261038] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.261206] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.261638] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abc9d829-c98b-43ca-9f05-052ccbb41126 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.272051] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.272318] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.275572] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c259a9a-b948-44b7-93a9-a89426b83af5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.285804] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1038.285804] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ae707-33b3-72ce-21cb-c865487c80c4" [ 1038.285804] env[61545]: _type = "Task" [ 1038.285804] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.290146] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079549} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.293624] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.297566] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d29962-a21f-4f9f-9a60-3353c1695e63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.333206] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 8b9c45a7-3574-47c9-b46b-34eed554fdc8/8b9c45a7-3574-47c9-b46b-34eed554fdc8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.343110] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc6ad6ec-ad9b-4769-ba51-a2d5dd6bcc34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.360713] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521ae707-33b3-72ce-21cb-c865487c80c4, 'name': SearchDatastore_Task, 'duration_secs': 0.012388} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.360825] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1038.360982] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1038.361176] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] f7a16153-2ef7-4be4-90a2-5ad6616203f8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.362597] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02c18ea2-a70c-4b0e-89fd-2c5aa6befd39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.365954] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f58995a2-694a-4ec3-a913-f76fbc219904 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.372102] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1038.372102] env[61545]: value = "task-4256330" [ 1038.372102] env[61545]: _type = "Task" [ 1038.372102] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.379299] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1038.379299] env[61545]: value = "task-4256331" [ 1038.379299] env[61545]: _type = "Task" [ 1038.379299] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.379639] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1038.379639] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52573952-fdb0-cfb3-b74d-5680582285b3" [ 1038.379639] env[61545]: _type = "Task" [ 1038.379639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.394710] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256330, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.401537] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52573952-fdb0-cfb3-b74d-5680582285b3, 'name': SearchDatastore_Task, 'duration_secs': 0.018918} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.407859] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.408180] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 97b72809-2a1e-4eda-af82-71cac2d79a64/97b72809-2a1e-4eda-af82-71cac2d79a64.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.408573] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.408803] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.409020] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "578ce929-99fd-47ae-8275-e4ac9abe8d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.409211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.409485] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock 
"578ce929-99fd-47ae-8275-e4ac9abe8d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.411089] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.411775] env[61545]: INFO nova.compute.manager [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Terminating instance [ 1038.413302] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f30039d0-c421-4026-b24f-91e9efdefe16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.423868] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1038.423868] env[61545]: value = "task-4256332" [ 1038.423868] env[61545]: _type = "Task" [ 1038.423868] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.439559] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.497245] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256328, 'name': ReconfigVM_Task, 'duration_secs': 0.324076} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.500766] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1038.599885] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.601086] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d33607c3-41ed-435f-a9d5-e97d6aa1b6f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.611101] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1038.611101] env[61545]: value = "task-4256333" [ 1038.611101] env[61545]: _type = "Task" [ 1038.611101] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.623059] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256333, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.726851] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cc8a20-f76a-4e3f-9529-c1bf2cb628c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.736201] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0293152-29bd-4ff8-9552-2897422b8c03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.780846] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89ddad9-0c20-40c8-b0fd-aee531466f99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.795474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016783a1-028d-4901-a987-4c9872171190 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.819541] env[61545]: DEBUG nova.compute.provider_tree [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.883080] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256330, 'name': ReconfigVM_Task, 'duration_secs': 0.425699} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.887019] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 8b9c45a7-3574-47c9-b46b-34eed554fdc8/8b9c45a7-3574-47c9-b46b-34eed554fdc8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.887857] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87ff5aa8-f712-4067-8afc-add16c34e32c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.897833] env[61545]: DEBUG oslo_vmware.api [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201954} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.899102] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.899341] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.899523] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.899697] env[61545]: INFO nova.compute.manager [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1038.900086] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.900243] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1038.900243] env[61545]: value = "task-4256334" [ 1038.900243] env[61545]: _type = "Task" [ 1038.900243] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.900595] env[61545]: DEBUG nova.compute.manager [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1038.900595] env[61545]: DEBUG nova.network.neutron [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.915841] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256334, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.919042] env[61545]: DEBUG nova.compute.manager [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1038.919222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1038.920306] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e292af41-2c4c-4404-b008-80771d9c4e4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.930535] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.936099] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3de38005-4b49-4ce8-9c17-838a888d14f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.946743] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256332, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.949021] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1038.949021] env[61545]: value = "task-4256335" [ 1038.949021] env[61545]: _type = "Task" [ 1038.949021] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.958881] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.009448] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.009750] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.009923] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.011139] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.011139] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.011139] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.011139] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.011139] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.011473] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible 
topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.011473] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.012765] env[61545]: DEBUG nova.virt.hardware [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.018207] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1039.018598] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-449d47a6-4138-45e0-834f-16aa7ac0207f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.039416] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1039.039416] env[61545]: value = "task-4256336" [ 1039.039416] env[61545]: _type = "Task" [ 1039.039416] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.051427] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256336, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.083253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.083510] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.124204] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256333, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.325494] env[61545]: DEBUG nova.scheduler.client.report [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.398751] env[61545]: DEBUG nova.compute.manager [req-62b02108-29b0-4f03-a079-6ad41d3cf85a req-c4c119a0-0ee1-4d67-9f32-39d689cda753 service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Received event network-vif-deleted-a63c04a1-28a0-46f2-a336-ab01754e90b6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1039.399476] env[61545]: INFO nova.compute.manager [req-62b02108-29b0-4f03-a079-6ad41d3cf85a req-c4c119a0-0ee1-4d67-9f32-39d689cda753 service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Neutron deleted interface a63c04a1-28a0-46f2-a336-ab01754e90b6; detaching it from the instance and deleting it from the info cache [ 1039.400019] env[61545]: DEBUG nova.network.neutron [req-62b02108-29b0-4f03-a079-6ad41d3cf85a req-c4c119a0-0ee1-4d67-9f32-39d689cda753 service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.417129] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256334, 'name': Rename_Task, 'duration_secs': 0.220748} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.417439] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.417696] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1864235-fa5c-402b-963c-cdeb359edf9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.426557] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1039.426557] env[61545]: value = "task-4256337" [ 1039.426557] env[61545]: _type = "Task" [ 1039.426557] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.440328] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612486} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.444111] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 97b72809-2a1e-4eda-af82-71cac2d79a64/97b72809-2a1e-4eda-af82-71cac2d79a64.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.444374] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.445081] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.445330] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0751a3b1-93ed-474a-a87a-aa826274b922 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.455686] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1039.455686] env[61545]: value = "task-4256338" [ 1039.455686] env[61545]: _type = "Task" [ 1039.455686] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.464654] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256335, 'name': PowerOffVM_Task, 'duration_secs': 0.247817} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.468024] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.468218] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.468513] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.468790] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c12fe401-1028-4ad7-b282-ea55d4917f3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.541285] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1039.541671] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1039.542790] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] 578ce929-99fd-47ae-8275-e4ac9abe8d49 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.546846] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a45e9d74-15c5-476e-a591-c73e7d1a9163 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.555587] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256336, 'name': ReconfigVM_Task, 'duration_secs': 0.297211} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.557577] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1039.558207] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1039.558207] env[61545]: value = "task-4256340" [ 1039.558207] env[61545]: _type = "Task" [ 1039.558207] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.559226] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3013d7-b0f3-4ddf-857d-f16601d5843e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.590966] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1039.608680] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.614293] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3759955d-39f0-419a-9f95-dba2b15d86b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.638991] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.652609] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256333, 'name': PowerOffVM_Task, 'duration_secs': 0.533821} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.655166] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.655597] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1039.655597] env[61545]: value = "task-4256341" [ 1039.655597] env[61545]: _type = "Task" [ 1039.655597] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.656497] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff389e9-9e5a-4c36-afc9-52252e98d6bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.670858] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256341, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.693042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba85cbca-0e30-4a4c-a895-20e62c05129f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.832598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.832910] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1039.835781] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.913s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.836157] env[61545]: DEBUG nova.objects.instance [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'resources' on Instance uuid 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.876456] env[61545]: DEBUG nova.network.neutron [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.906146] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8960c114-796c-4deb-9153-10ecc31606a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.916647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e9172e-f522-4a23-bd48-3bbca1fb5447 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.939157] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256337, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.961129] env[61545]: DEBUG nova.compute.manager [req-62b02108-29b0-4f03-a079-6ad41d3cf85a req-c4c119a0-0ee1-4d67-9f32-39d689cda753 service nova] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Detach interface failed, port_id=a63c04a1-28a0-46f2-a336-ab01754e90b6, reason: Instance f7a16153-2ef7-4be4-90a2-5ad6616203f8 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1039.970670] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07339} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.971050] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.974927] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0607c13e-a19a-4024-a871-89e5bd81225a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.001034] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 97b72809-2a1e-4eda-af82-71cac2d79a64/97b72809-2a1e-4eda-af82-71cac2d79a64.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.001034] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e3f59ce-5202-4c63-9d86-8fa0ba2ed345 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.024648] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1040.024648] env[61545]: value = "task-4256342" [ 1040.024648] env[61545]: _type = "Task" [ 1040.024648] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.036759] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256342, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.079521] env[61545]: DEBUG oslo_vmware.api [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317651} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.080113] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.080471] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.080736] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.080942] env[61545]: INFO nova.compute.manager [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1040.081307] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.081591] env[61545]: DEBUG nova.compute.manager [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.081719] env[61545]: DEBUG nova.network.neutron [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.142271] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.170372] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256341, 'name': ReconfigVM_Task, 'duration_secs': 0.50083} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.170713] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201/844f01ed-4dae-4e13-9d1c-09a73f413201.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.171041] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1040.206406] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1040.206738] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a859d245-a1bb-4257-bca6-8f60bc68892e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.214815] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1040.214815] env[61545]: value = "task-4256343" [ 1040.214815] env[61545]: _type = "Task" [ 1040.214815] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.227696] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256343, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.340541] env[61545]: DEBUG nova.compute.utils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.341967] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1040.343234] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1040.345703] env[61545]: DEBUG nova.objects.instance [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'numa_topology' on Instance uuid 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.380564] env[61545]: INFO nova.compute.manager [-] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Took 1.48 seconds to deallocate network for instance. [ 1040.422424] env[61545]: DEBUG nova.policy [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f01e33a652314f70a08ae1a8087a54cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f4ccb92c3be47f18fd65a22a5a1ad94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.441878] env[61545]: DEBUG oslo_vmware.api [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256337, 'name': PowerOnVM_Task, 'duration_secs': 0.720905} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.446355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.446618] env[61545]: INFO nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Took 10.66 seconds to spawn the instance on the hypervisor. 
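Editor's note: the "Waiting for the task" / "progress is N%" pairs that dominate this trace come from a poll loop around vCenter task objects (PowerOnVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task and so on). Below is a minimal, self-contained sketch of that polling pattern; FakeTask and its poll() method are illustrative stand-ins for a real task handle, not the oslo.vmware API.

    import time

    class FakeTask:
        """Illustrative stand-in for a vCenter task handle (PowerOnVM_Task etc.)."""
        def __init__(self, steps):
            self._progress = iter(steps)

        def poll(self):
            # Returns (state, progress), mirroring the "progress is N%" lines above.
            try:
                return "running", next(self._progress)
            except StopIteration:
                return "success", 100


    def wait_for_task(task, interval=0.5):
        """Poll a task until it finishes, reporting progress like _poll_task does."""
        while True:
            state, progress = task.poll()
            print("Task progress is %d%%" % progress)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)


    # Progress values taken from the kind of sequence seen in this log.
    wait_for_task(FakeTask([0, 51, 66]), interval=0.01)

The real driver additionally re-raises the task's fault on error and records the duration_secs values that appear in the completed-task lines above.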
[ 1040.446805] env[61545]: DEBUG nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.448109] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795646dd-a605-463f-9108-9c9c7367fa00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.535806] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256342, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.679994] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae825f72-4eba-4a8f-9d6e-184b061727ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.705396] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a2272d-b701-45d2-af6a-09587fe31fa9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.733602] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1040.747252] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256343, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.759147] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Successfully created port: b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.848176] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.851375] env[61545]: DEBUG nova.objects.base [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Object Instance<4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42> lazy-loaded attributes: resources,numa_topology {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1040.889289] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.974171] env[61545]: INFO nova.compute.manager [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Took 25.65 seconds to build instance. [ 1040.981517] env[61545]: DEBUG nova.network.neutron [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.039306] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256342, 'name': ReconfigVM_Task, 'duration_secs': 0.62051} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.040334] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 97b72809-2a1e-4eda-af82-71cac2d79a64/97b72809-2a1e-4eda-af82-71cac2d79a64.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.040334] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d38390e-22dd-4d7e-b598-d11adca30bd2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.047914] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1041.047914] env[61545]: value = "task-4256344" [ 1041.047914] env[61545]: _type = "Task" [ 1041.047914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.057901] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256344, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.250956] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256343, 'name': CreateSnapshot_Task, 'duration_secs': 0.780348} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.255464] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1041.257699] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0af00d-f3a0-42b6-a94f-d63b7b56995c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.332472] env[61545]: DEBUG nova.network.neutron [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Port a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1041.332854] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1041.333811] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ee9ce0-b68b-4f0f-80ed-5cfb6efcf842 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.345466] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1041.345699] env[61545]: ERROR oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk due to incomplete transfer. 
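Editor's note: the ERROR above shows an image-export read handle being closed while the NFC lease still has data outstanding, so the lease is aborted rather than completed and vCenter discards the partial transfer. A minimal sketch of that close-time decision follows; the VmdkReadHandle and Lease classes are illustrative, not the oslo.vmware rw_handles implementation.

    class Lease:
        """Illustrative stand-in for an HttpNfcLease."""
        def abort(self, reason):
            print("lease aborted: %s" % reason)

        def complete(self):
            print("lease completed")


    class VmdkReadHandle:
        def __init__(self, lease, bytes_expected):
            self._lease = lease
            self._expected = bytes_expected
            self._transferred = 0

        def read(self, chunk):
            self._transferred += len(chunk)
            return chunk

        def close(self):
            if self._transferred < self._expected:
                # Mirrors "Aborting lease ... due to incomplete transfer."
                self._lease.abort("incomplete transfer")
            else:
                self._lease.complete()


    handle = VmdkReadHandle(Lease(), bytes_expected=21318656)
    handle.read(b"x" * 1024)
    handle.close()  # aborts: only 1 KiB of the ~21 MiB disk was read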
[ 1041.346123] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2bfce11d-72ad-4be7-a0a9-cde7b965bba1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.360992] env[61545]: DEBUG oslo_vmware.rw_handles [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f7cb21-1fa6-814a-27fe-c1efdce46c00/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1041.361104] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Uploaded image 2db3fa56-39d4-433c-8da2-93da32a05392 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1041.363653] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1041.367369] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1e38dada-46da-4fea-912b-8e77f17cdb72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.375717] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1041.375717] env[61545]: value = "task-4256345" [ 1041.375717] env[61545]: _type = "Task" [ 1041.375717] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.383202] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3532931-e3f9-43dd-b37b-cf5e8de25f02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.390640] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256345, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.399167] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da30551-ec0e-42d6-8f46-4ca9af27c329 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.435751] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f1de3c-2c30-4d8d-a6e1-901100504020 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.444845] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7069e77a-d191-4284-ac31-cf4906638f75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.463381] env[61545]: DEBUG nova.compute.provider_tree [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.476220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f00a929a-5f4e-4258-b417-a950d848c08a tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.168s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.484588] env[61545]: INFO nova.compute.manager [-] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Took 1.40 seconds to deallocate network for instance. [ 1041.559787] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256344, 'name': Rename_Task, 'duration_secs': 0.237772} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.560254] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.560587] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e64e97cc-ef4d-44f9-9169-1c538d08593b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.569072] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1041.569072] env[61545]: value = "task-4256346" [ 1041.569072] env[61545]: _type = "Task" [ 1041.569072] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.580159] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.621532] env[61545]: DEBUG nova.compute.manager [req-7673f2e3-2e7e-418d-b6d7-9e8b55fc6dd1 req-9569ce58-8202-49fd-bb17-1f06f80881f7 service nova] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Received event network-vif-deleted-f7745e62-9a91-4729-af18-5a9f49312659 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1041.792771] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1041.792926] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a2aa7a8e-c1c0-403e-b837-bb8b5cf6dd33 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.803614] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1041.803614] env[61545]: value = "task-4256347" [ 1041.803614] env[61545]: _type = "Task" [ 1041.803614] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.815634] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256347, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.863380] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1041.891144] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256345, 'name': Destroy_Task, 'duration_secs': 0.440276} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.891490] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Destroyed the VM [ 1041.891848] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1041.893455] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4ed366ad-0689-4e82-b611-34e91a5caa81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.904669] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1041.904669] env[61545]: value = "task-4256348" [ 1041.904669] env[61545]: _type = "Task" [ 1041.904669] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.920297] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256348, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.920828] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.921235] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.921543] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image limits 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.922407] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.922407] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.922619] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.923163] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.923491] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.923829] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.924150] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.924434] env[61545]: DEBUG nova.virt.hardware [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.925798] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b8920a-0546-4e98-b95c-90ef094a4c53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.936687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c673eb-2dcf-4a6c-bf2f-2800f6413e5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.967456] env[61545]: DEBUG 
nova.scheduler.client.report [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.993565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.080563] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256346, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.318564] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256347, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.358499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.358833] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.358833] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.368280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.369102] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.369102] env[61545]: INFO nova.compute.manager [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Shelving [ 1042.415894] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256348, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.476270] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.640s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.479887] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.046s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.481649] env[61545]: INFO nova.compute.claims [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.584245] env[61545]: DEBUG oslo_vmware.api [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256346, 'name': PowerOnVM_Task, 'duration_secs': 0.529828} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.584997] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.585396] env[61545]: INFO nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Took 10.08 seconds to spawn the instance on the hypervisor. 
[ 1042.585732] env[61545]: DEBUG nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.586702] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfba5cd-cffa-40e3-ae74-87116dc52e47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.822025] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256347, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.920251] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256348, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.956206] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Successfully updated port: b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.988965] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2772715d-6b83-4423-9fb6-958c18a87afc tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 41.064s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.992498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 8.597s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.992498] env[61545]: INFO nova.compute.manager [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Unshelving [ 1043.115163] env[61545]: INFO nova.compute.manager [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Took 26.92 seconds to build instance. [ 1043.316962] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256347, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.386183] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.386521] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fbb5181-c10c-4d08-97b7-70a1c8ed3d95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.396996] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1043.396996] env[61545]: value = "task-4256349" [ 1043.396996] env[61545]: _type = "Task" [ 1043.396996] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.411225] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.420142] env[61545]: DEBUG oslo_vmware.api [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256348, 'name': RemoveSnapshot_Task, 'duration_secs': 1.057812} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.422473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.423358] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.423573] env[61545]: DEBUG nova.network.neutron [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.424944] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1043.425174] env[61545]: INFO nova.compute.manager [None req-7d6ef526-aabf-4b59-8603-e1fc5337c686 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 16.32 seconds to snapshot the instance on the hypervisor. 
[ 1043.460494] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.460583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.460798] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.616782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0e489aa8-50d1-4a7a-94b3-3aaad9d03058 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.432s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.820391] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256347, 'name': CloneVM_Task, 'duration_secs': 1.526764} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.823590] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Created linked-clone VM from snapshot [ 1043.824987] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb0dd5a-d721-49de-b87d-2851d7b605ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.834942] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Uploading image 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1043.887181] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1043.887181] env[61545]: value = "vm-838775" [ 1043.887181] env[61545]: _type = "VirtualMachine" [ 1043.887181] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1043.887692] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d3e10b6c-7b15-4998-b13c-051baea1a5bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.896598] env[61545]: DEBUG nova.compute.manager [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Received event network-vif-plugged-b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1043.897017] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Acquiring lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.897482] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.897769] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.898147] env[61545]: DEBUG nova.compute.manager [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] No waiting events found dispatching network-vif-plugged-b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.898477] env[61545]: WARNING nova.compute.manager [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Received unexpected event network-vif-plugged-b8a4b8ef-6552-4028-a715-5b4022737ea0 for instance with vm_state building and task_state spawning. [ 1043.898794] env[61545]: DEBUG nova.compute.manager [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Received event network-changed-b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1043.899170] env[61545]: DEBUG nova.compute.manager [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Refreshing instance network info cache due to event network-changed-b8a4b8ef-6552-4028-a715-5b4022737ea0. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1043.899883] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Acquiring lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.909864] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lease: (returnval){ [ 1043.909864] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acb56c-de32-8b90-fcde-186003c50954" [ 1043.909864] env[61545]: _type = "HttpNfcLease" [ 1043.909864] env[61545]: } obtained for exporting VM: (result){ [ 1043.909864] env[61545]: value = "vm-838775" [ 1043.909864] env[61545]: _type = "VirtualMachine" [ 1043.909864] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1043.910942] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the lease: (returnval){ [ 1043.910942] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acb56c-de32-8b90-fcde-186003c50954" [ 1043.910942] env[61545]: _type = "HttpNfcLease" [ 1043.910942] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1043.923721] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256349, 'name': PowerOffVM_Task, 'duration_secs': 0.356248} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.924803] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.925649] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60332ab-1ddb-4bf3-b96e-15eb5fd76d39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.934517] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1043.934517] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acb56c-de32-8b90-fcde-186003c50954" [ 1043.934517] env[61545]: _type = "HttpNfcLease" [ 1043.934517] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1043.937477] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1043.937477] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52acb56c-de32-8b90-fcde-186003c50954" [ 1043.937477] env[61545]: _type = "HttpNfcLease" [ 1043.937477] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1043.942053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6bb85e-5d69-4187-9ff7-72872d017727 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.961407] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507b7b8d-ec86-4921-8496-eaf32d6c1f93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.964685] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe38796-e7f0-4254-9b9a-cf90fedb9e63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.972588] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1043.972831] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1044.028833] env[61545]: DEBUG nova.compute.utils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1044.038278] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0768954-e225-4bcf-9fff-cac73468a6e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.073730] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e07bbe-2ca5-426e-968d-80b5d0f4b252 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.082536] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14681d21-c8b6-4a78-8a18-5d24ff169fd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.098137] env[61545]: DEBUG nova.compute.provider_tree [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.176857] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1de906b8-7ed4-46e5-80c7-da285ff0f1a2 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.344681] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.540782] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1044.547035] env[61545]: INFO nova.virt.block_device [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Booting with volume 9009d07b-81b0-4ba0-ae46-44590740ed11 at /dev/sdb [ 1044.551859] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ebdc5e6e-64b0-49c0-8f74-2ea905268c89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.564653] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1044.564653] env[61545]: value = "task-4256351" [ 1044.564653] env[61545]: _type = "Task" [ 1044.564653] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.581692] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256351, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.602254] env[61545]: DEBUG nova.scheduler.client.report [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.623745] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ad67c19-549e-4413-a287-42b428cc0916 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.640518] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430674bd-6a32-4cf1-a820-0b17c5b30531 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.723815] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3daf0524-4367-48f7-b4ae-bfc77dee8a16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.739553] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d15a63a-f01f-4a25-83b6-6eb35c57a6e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.791915] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e32374f-f936-4d99-b180-5714d05cd09a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.802329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccead137-d41e-491e-b0af-ce2fad1e5aa0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.819481] env[61545]: DEBUG nova.virt.block_device [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating existing volume attachment record: 80cbdd79-308c-4e76-ad16-ca2f9ac0f4c8 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1045.084753] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256351, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.093571] env[61545]: DEBUG nova.network.neutron [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Updating instance_info_cache with network_info: [{"id": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "address": "fa:16:3e:b8:42:6d", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a4b8ef-65", "ovs_interfaceid": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.112891] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.113599] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.118426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.892s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.278311] env[61545]: DEBUG nova.network.neutron [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.580092] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256351, 'name': CreateSnapshot_Task, 'duration_secs': 0.871686} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.580561] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1045.581395] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab8b9ec-2185-4c10-98d4-5559396e2744 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.596616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.596936] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Instance network_info: |[{"id": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "address": "fa:16:3e:b8:42:6d", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a4b8ef-65", "ovs_interfaceid": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1045.598366] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Acquired lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.599648] env[61545]: DEBUG nova.network.neutron [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Refreshing network info cache for port b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.600880] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 
tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:42:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8a4b8ef-6552-4028-a715-5b4022737ea0', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.621712] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.624072] env[61545]: DEBUG nova.compute.utils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.626150] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.642775] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.642775] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.647151] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a51c83d4-44ca-4dbc-a0a6-48e0b7e37d72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.668956] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.668956] env[61545]: value = "task-4256355" [ 1045.668956] env[61545]: _type = "Task" [ 1045.668956] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.682517] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256355, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.774607] env[61545]: DEBUG nova.policy [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0877daa3feb540548759749579ad5f58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ce3ab2359464ab9ad254a721e1aeb47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.783479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.102253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1046.104770] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ac651040-e327-413a-a296-d18b80850b91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.114131] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1046.114131] env[61545]: value = "task-4256356" [ 1046.114131] env[61545]: _type = "Task" [ 1046.114131] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.123258] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256356, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.126969] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1046.140412] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 as it has an incoming, in-progress migration e866c452-7774-4a83-9c60-8d591a4adec9. 
Migration status is confirming {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1046.140662] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 844f01ed-4dae-4e13-9d1c-09a73f413201 as it has an incoming, in-progress migration 4a8c88d2-213c-4f97-884a-8726cbc598fe. Migration status is post-migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1046.143701] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating resource usage from migration e866c452-7774-4a83-9c60-8d591a4adec9 [ 1046.143701] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating resource usage from migration 4a8c88d2-213c-4f97-884a-8726cbc598fe [ 1046.175112] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.175264] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8d838d3b-32ad-4bb2-839e-6bd81c363447 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.175411] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 16bc91d0-71c3-4bd9-980b-6574c3fd9335 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.175527] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e21de424-8121-4e2f-84c2-8096ba8048cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.175694] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f7a16153-2ef7-4be4-90a2-5ad6616203f8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1046.175839] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance b1277c3b-cd7b-43be-9eff-640145dde5e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.175987] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c2bb4ea0-e9fb-4198-80fa-acfd25fb226d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.176145] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f9c9c447-e676-4143-b329-fb6d71bcd553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.176303] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1e5be92c-d727-4515-9e16-85ade2719455 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 578ce929-99fd-47ae-8275-e4ac9abe8d49 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 855904d8-7eb3-405d-9236-ab4ba9b33940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7301c541-664f-43ec-8a34-86f38cac22ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration e866c452-7774-4a83-9c60-8d591a4adec9 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.177355] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 8b9c45a7-3574-47c9-b46b-34eed554fdc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.178513] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 97b72809-2a1e-4eda-af82-71cac2d79a64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.178513] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration 4a8c88d2-213c-4f97-884a-8726cbc598fe is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1046.178513] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 844f01ed-4dae-4e13-9d1c-09a73f413201 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.178513] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance b33e29cc-fe26-429a-8799-8d790667cc1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.178513] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 91eeceeb-c11e-414b-8ae6-e68e927f1f1e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.186212] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256355, 'name': CreateVM_Task, 'duration_secs': 0.357243} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.186379] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.187300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.187545] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.188341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.189105] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71eb6d2f-8714-416f-b6f1-2488b2f1bc30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.196116] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1046.196116] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52406970-b824-e77e-b838-036644eea204" [ 1046.196116] env[61545]: _type = "Task" [ 1046.196116] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.206139] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52406970-b824-e77e-b838-036644eea204, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.315803] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddc844d-8f3f-41c0-b2cf-fabd8ba82af5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.347643] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1f5974-0920-41db-b319-363413796378 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.359883] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1046.460455] env[61545]: DEBUG nova.compute.manager [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-changed-ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1046.460656] env[61545]: DEBUG nova.compute.manager [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing instance network info cache due to event network-changed-ce640f58-ba75-4bd9-8c39-40145ff6ac4e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1046.460886] env[61545]: DEBUG oslo_concurrency.lockutils [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.461181] env[61545]: DEBUG oslo_concurrency.lockutils [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.461393] env[61545]: DEBUG nova.network.neutron [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing network info cache for port ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.626281] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256356, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.666225] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Successfully created port: 5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.683480] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.692163] env[61545]: DEBUG nova.network.neutron [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Updated VIF entry in instance network info cache for port b8a4b8ef-6552-4028-a715-5b4022737ea0. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.693909] env[61545]: DEBUG nova.network.neutron [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Updating instance_info_cache with network_info: [{"id": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "address": "fa:16:3e:b8:42:6d", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a4b8ef-65", "ovs_interfaceid": "b8a4b8ef-6552-4028-a715-5b4022737ea0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.712682] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52406970-b824-e77e-b838-036644eea204, 'name': SearchDatastore_Task, 'duration_secs': 0.020357} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.713069] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.713376] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.713570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.713751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.713942] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.714851] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28a03a1e-3cb1-4202-ac67-6ce302d647e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.729078] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.729432] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Folder [datastore2] devstack-image-cache_base created. 
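The acquire/release pairs above around "[datastore2] devstack-image-cache_base/<image-uuid>" show per-image locking of the datastore image cache so concurrent spawns do not race on the same cached VMDK. A minimal sketch of that pattern, assuming oslo.concurrency is importable; the lock-name format and the `work` callable are illustrative, not taken from Nova:

from oslo_concurrency import lockutils

def with_image_cache_lock(datastore, image_id, work):
    """Run work() while holding the per-image cache lock (illustrative)."""
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(lock_name):
        return work()

# Hypothetical usage: only one concurrent spawn searches the datastore for a
# given cached image; the others wait on the same lock name.
# with_image_cache_lock("datastore2", "<image-uuid>", search_datastore)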
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.730958] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6bf691-4cd6-4df0-a427-ea55665fac09 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.740710] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1046.740710] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273e4b2-2ef0-3ae5-d070-0f86b668d5ad" [ 1046.740710] env[61545]: _type = "Task" [ 1046.740710] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.751735] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273e4b2-2ef0-3ae5-d070-0f86b668d5ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.865723] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.865901] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6967fc35-4639-475e-8f3d-b5fc5e5d5344 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.874550] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1046.874550] env[61545]: value = "task-4256357" [ 1046.874550] env[61545]: _type = "Task" [ 1046.874550] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.883933] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.126883] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256356, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.137453] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.169662] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.169916] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.170092] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.170323] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.170439] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.170588] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.170930] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.171146] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.171329] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 
tempest-ServersTestJSON-1123416272-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.171703] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.172055] env[61545]: DEBUG nova.virt.hardware [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.173102] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213228dd-89ae-44e6-9f43-3e3f108065d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.184921] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d46eab-4dcf-4638-a8ad-44849eb8865d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.192068] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2f8567b1-7291-4705-8ef3-23547eb4860e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1047.204252] env[61545]: DEBUG oslo_concurrency.lockutils [req-7b09d3fd-615b-4751-a4b1-154f9d2f0bd2 req-a217a9ab-e537-416c-9713-49257fb475f5 service nova] Releasing lock "refresh_cache-b33e29cc-fe26-429a-8799-8d790667cc1d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.252643] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5273e4b2-2ef0-3ae5-d070-0f86b668d5ad, 'name': SearchDatastore_Task, 'duration_secs': 0.02435} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.253985] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0eff388-f282-424f-a726-4f716abacd01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.262467] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1047.262467] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5292b4ea-1a5c-3b2f-93d8-8cc5f06a9eb6" [ 1047.262467] env[61545]: _type = "Task" [ 1047.262467] env[61545]: } to complete. 
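The hardware lines above walk from "Flavor limits 0:0:0" (unset, treated as 65536) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for the single-vCPU m1.nano flavor. A small self-contained sketch of that enumeration (illustrative, not nova.virt.hardware itself):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged above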
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.275225] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5292b4ea-1a5c-3b2f-93d8-8cc5f06a9eb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.389236] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256357, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.582523] env[61545]: DEBUG nova.network.neutron [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updated VIF entry in instance network info cache for port ce640f58-ba75-4bd9-8c39-40145ff6ac4e. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.582913] env[61545]: DEBUG nova.network.neutron [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.629191] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256356, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.695392] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance db2d0e21-f6bb-4f61-8d54-e9191de13a59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1047.695721] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1047.695897] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4480MB phys_disk=250GB used_disk=19GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1047.781271] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5292b4ea-1a5c-3b2f-93d8-8cc5f06a9eb6, 'name': SearchDatastore_Task, 'duration_secs': 0.014632} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.781518] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.781774] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b33e29cc-fe26-429a-8799-8d790667cc1d/b33e29cc-fe26-429a-8799-8d790667cc1d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.782400] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e4c27d9-76ef-434f-a327-c9e49161c556 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.796781] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1047.796781] env[61545]: value = "task-4256359" [ 1047.796781] env[61545]: _type = "Task" [ 1047.796781] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.807689] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.887336] env[61545]: DEBUG oslo_vmware.api [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256357, 'name': PowerOnVM_Task, 'duration_secs': 0.58488} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.887692] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.887912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1d64e8ab-fc10-48cc-98a9-e6f9a5f1b3cf tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance '844f01ed-4dae-4e13-9d1c-09a73f413201' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.085764] env[61545]: DEBUG oslo_concurrency.lockutils [req-2fffd4bd-6889-4612-9c57-ab2e49fe096b req-50adea3a-251e-4572-beb5-16bed7c1e616 service nova] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.095139] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf02355-4557-44ff-b38c-111fa36b0755 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.105082] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac78316-e35b-4300-a30e-5d616bc40ddd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.143167] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f0bbe1-e05f-40b8-82a2-aa7676a36f89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.155733] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6ea688-681f-48c2-8b61-153d7fabaf5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.160185] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256356, 'name': CloneVM_Task, 'duration_secs': 1.545281} completed successfully. 
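The repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" lines followed by "completed successfully" come from oslo.vmware polling the vSphere task until it reaches a terminal state. A minimal sketch of that polling loop; `read_task_info` is a hypothetical stand-in for the real property-collector read, so this is a pattern illustration rather than the library's implementation:

import time

def wait_for_task(read_task_info, task_ref, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = read_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 66}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed: %s" % (task_ref, info.get("error")))
        time.sleep(interval)              # logged as "progress is 0%", "66%", ...
    raise TimeoutError("task %s did not complete within %ss" % (task_ref, timeout))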
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.160507] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Created linked-clone VM from snapshot [ 1048.161745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f420a3-2dbf-4782-b404-8716afd90e07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.172704] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.181214] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Uploading image 5034b2e2-dbdc-4463-8a43-60cf580bf4f3 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1048.211678] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1048.211678] env[61545]: value = "vm-838780" [ 1048.211678] env[61545]: _type = "VirtualMachine" [ 1048.211678] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1048.211678] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ba968ef2-db0b-42a6-b0de-fa557ff324a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.221571] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lease: (returnval){ [ 1048.221571] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5226b46e-6d34-b99d-d0bf-f795403d4c22" [ 1048.221571] env[61545]: _type = "HttpNfcLease" [ 1048.221571] env[61545]: } obtained for exporting VM: (result){ [ 1048.221571] env[61545]: value = "vm-838780" [ 1048.221571] env[61545]: _type = "VirtualMachine" [ 1048.221571] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1048.222090] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the lease: (returnval){ [ 1048.222090] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5226b46e-6d34-b99d-d0bf-f795403d4c22" [ 1048.222090] env[61545]: _type = "HttpNfcLease" [ 1048.222090] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1048.231601] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1048.231601] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5226b46e-6d34-b99d-d0bf-f795403d4c22" [ 1048.231601] env[61545]: _type = "HttpNfcLease" [ 1048.231601] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1048.307422] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.676443] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.730906] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1048.730906] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5226b46e-6d34-b99d-d0bf-f795403d4c22" [ 1048.730906] env[61545]: _type = "HttpNfcLease" [ 1048.730906] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1048.731270] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1048.731270] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5226b46e-6d34-b99d-d0bf-f795403d4c22" [ 1048.731270] env[61545]: _type = "HttpNfcLease" [ 1048.731270] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1048.731984] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b365c3d-0f14-4be6-990f-320c5d320eca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.740763] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1048.740992] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk for reading. 
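The inventory blob reported for provider 7015027d-c4e1-4938-ac31-6e4672774d7e above implies the placement-style capacity math: usable capacity per resource class is (total - reserved) scaled by the allocation ratio, which is why 48 physical vCPUs comfortably back the 20 vCPUs already allocated. A short sketch of that arithmetic using the exact values from the log:

def capacity(inventory):
    return {
        rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        for rc, inv in inventory.items()
    }

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 450, "reserved": 0, "allocation_ratio": 1.0},
}
print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 450}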
{{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1048.811218] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256359, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.868935] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8bd888fe-d11c-4055-97fc-ffe7a67f234a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.172923] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Successfully updated port: 5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.184502] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1049.184810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.066s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.185540] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.912s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.311221] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256359, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.346114} completed successfully. 
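The snapshot-upload path above creates an HttpNfcLease for the linked-clone VM, waits while it "is initializing" until it "is ready", then pulls the disk-0.vmdk URL from the lease info and opens it for reading. A hedged, self-contained sketch of that flow; `create_lease`, `read_lease`, and `open_url` are hypothetical helpers standing in for the real ExportVm / property-collector / rw_handles calls:

import time

def export_vm_disk(create_lease, read_lease, open_url, vm_ref, timeout=300):
    lease = create_lease(vm_ref)               # VirtualMachine.ExportVm
    deadline = time.monotonic() + timeout
    while True:
        info = read_lease(lease)               # e.g. {'state': 'ready', 'urls': [...]}
        if info["state"] == "ready":
            break
        if info["state"] == "error" or time.monotonic() > deadline:
            raise RuntimeError("HttpNfcLease never became ready")
        time.sleep(1)                           # "Lease ... is initializing."
    vmdk_url = info["urls"][0]                  # "Found VMDK URL ... from lease info"
    return open_url(vmdk_url)                   # read handle used for the image upload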
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.311925] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] b33e29cc-fe26-429a-8799-8d790667cc1d/b33e29cc-fe26-429a-8799-8d790667cc1d.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.311925] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.312164] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28b0485a-e493-4f2e-89a4-59b38331bf45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.320800] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1049.320800] env[61545]: value = "task-4256361" [ 1049.320800] env[61545]: _type = "Task" [ 1049.320800] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.333820] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256361, 'name': ExtendVirtualDisk_Task} progress is 0%. 
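"Extending root virtual disk to 1048576" follows directly from the flavor: the cached cirros image is only ~21 MB, so after the CopyVirtualDisk_Task the root disk is grown to the flavor's root_gb expressed in KiB (1 GiB = 1024 * 1024 KiB). A one-liner showing that size math:

def root_disk_kib(root_gb):
    """Root disk size in KiB, as the vmwareapi driver sizes virtual disks."""
    return root_gb * 1024 * 1024

assert root_disk_kib(1) == 1048576  # matches the m1.nano flavor (root_gb=1) above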
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.398718] env[61545]: DEBUG nova.compute.manager [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Received event network-vif-plugged-5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1049.398718] env[61545]: DEBUG oslo_concurrency.lockutils [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] Acquiring lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.398718] env[61545]: DEBUG oslo_concurrency.lockutils [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.398718] env[61545]: DEBUG oslo_concurrency.lockutils [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.398718] env[61545]: DEBUG nova.compute.manager [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] No waiting events found dispatching network-vif-plugged-5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1049.398718] env[61545]: WARNING nova.compute.manager [req-e0d14e41-5b71-43a7-a90d-928eb516207a req-88dedac7-414a-4f5f-92ca-0599a01f9898 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Received unexpected event network-vif-plugged-5f4a8e1e-aa1b-4785-8467-54496ef2257b for instance with vm_state building and task_state spawning. 
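The network-vif-plugged handling above pops any waiter registered for the (instance, event) pair; since nothing is waiting yet, the manager logs the "Received unexpected event" warning and moves on. An illustrative sketch of that dispatch pattern (not the real nova.compute.manager code; the class and function names are invented for the example):

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}              # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop(instance_uuid, event_name)
    if waiter is None:
        print("Received unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        waiter.set()                    # unblock the thread spawning the instance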
[ 1049.681673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.681673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquired lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.681673] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.793550] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.793943] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.794211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.794979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.795237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.800916] env[61545]: INFO nova.compute.manager [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe 
tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Terminating instance [ 1049.835338] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074977} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.836886] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.841694] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b6541a-96af-4236-ba96-56cb00af0f30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.874385] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] b33e29cc-fe26-429a-8799-8d790667cc1d/b33e29cc-fe26-429a-8799-8d790667cc1d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.878657] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3648b930-0958-4fd4-bb68-1bc5bcce9312 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.906678] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1049.906678] env[61545]: value = "task-4256362" [ 1049.906678] env[61545]: _type = "Task" [ 1049.906678] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.925822] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256362, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.189036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35a84a0-b630-423c-857a-bddd81e01b9c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.199516] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a731a0-13ed-47eb-ae02-0a2a18e0b1e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.233142] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1050.235761] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea2e828-bfb9-424e-a2b3-9587e84fcfa9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.245617] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678507a1-8e3e-403d-b55e-d1e19fbb4625 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.263410] env[61545]: DEBUG nova.compute.provider_tree [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.313040] env[61545]: DEBUG nova.compute.manager [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.313040] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.313040] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e10e714b-1e2d-4ebe-9d9f-19c85893498e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.320030] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1050.320030] env[61545]: value = "task-4256363" [ 1050.320030] env[61545]: _type = "Task" [ 1050.320030] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.334345] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.376789] env[61545]: DEBUG nova.compute.manager [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.378024] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af4ad09-d0a1-4f50-8319-d60c22a53b07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.423944] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256362, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.482251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.538964] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.539427] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.567799] env[61545]: DEBUG nova.network.neutron [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updating instance_info_cache with network_info: [{"id": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "address": "fa:16:3e:61:3a:cd", "network": {"id": "eae53209-7628-446d-8fb2-132a6d9a4114", "bridge": "br-int", "label": "tempest-ServersTestJSON-1017663297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ce3ab2359464ab9ad254a721e1aeb47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4a8e1e-aa", "ovs_interfaceid": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.613078] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.613396] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.613700] env[61545]: DEBUG nova.compute.manager [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Going to confirm migration 4 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1050.769630] env[61545]: DEBUG nova.scheduler.client.report [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1050.832931] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256363, 'name': PowerOffVM_Task, 'duration_secs': 0.303565} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.833615] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.833755] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1050.834053] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838731', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'name': 'volume-11641b07-7823-42c5-8e71-d45453cc6704', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b', 'attached_at': '', 'detached_at': '', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'serial': '11641b07-7823-42c5-8e71-d45453cc6704'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1050.836398] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeac074d-8185-4b0e-991b-15cd25c932dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.856849] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1a87e8-1ee9-4f02-a730-47add7274ed3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.865569] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa33bf6c-780b-4484-9fbe-6488068ef161 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.888312] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332e9762-8438-406c-9c6e-770ddca12aed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.910651] env[61545]: INFO nova.compute.manager [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] instance snapshotting [ 1050.912573] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] The volume has not been displaced from its original location: [datastore2] volume-11641b07-7823-42c5-8e71-d45453cc6704/volume-11641b07-7823-42c5-8e71-d45453cc6704.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1050.918372] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Reconfiguring VM instance instance-0000004c to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1050.919292] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cf60cf3-00f7-4fc1-86b2-7c198a8de108 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.934888] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65c4b24-5fbf-41c2-8edd-88db45821c04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.962566] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256362, 'name': ReconfigVM_Task, 'duration_secs': 0.568298} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.964621] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Reconfigured VM instance instance-00000053 to attach disk [datastore2] b33e29cc-fe26-429a-8799-8d790667cc1d/b33e29cc-fe26-429a-8799-8d790667cc1d.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.965998] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea599af-c912-4b3c-9091-719333dd8f1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.973022] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1050.973022] env[61545]: value = "task-4256364" [ 1050.973022] env[61545]: _type = "Task" [ 1050.973022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.973022] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afed282f-0644-4230-923e-f170920608c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.985695] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1050.985695] env[61545]: value = "task-4256365" [ 1050.985695] env[61545]: _type = "Task" [ 1050.985695] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.988652] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.000496] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256365, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.043225] env[61545]: INFO nova.compute.manager [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Detaching volume 705a0f51-10e1-4167-b382-baf0f7935774 [ 1051.071821] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Releasing lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.071821] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Instance network_info: |[{"id": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "address": "fa:16:3e:61:3a:cd", "network": {"id": "eae53209-7628-446d-8fb2-132a6d9a4114", "bridge": "br-int", "label": "tempest-ServersTestJSON-1017663297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ce3ab2359464ab9ad254a721e1aeb47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4a8e1e-aa", "ovs_interfaceid": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1051.075468] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:3a:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd646f9d5-d2ad-4c22-bea5-85a965334de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'5f4a8e1e-aa1b-4785-8467-54496ef2257b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.082838] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Creating folder: Project (0ce3ab2359464ab9ad254a721e1aeb47). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1051.083447] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fff39327-5916-4654-a454-4e979441a03a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.090387] env[61545]: INFO nova.virt.block_device [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Attempting to driver detach volume 705a0f51-10e1-4167-b382-baf0f7935774 from mountpoint /dev/sdb [ 1051.090699] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1051.090981] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838749', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'name': 'volume-705a0f51-10e1-4167-b382-baf0f7935774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2bb4ea0-e9fb-4198-80fa-acfd25fb226d', 'attached_at': '', 'detached_at': '', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'serial': '705a0f51-10e1-4167-b382-baf0f7935774'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1051.091999] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede5e06a-fef8-4bed-894b-e66abb9f702c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.097534] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Created folder: Project (0ce3ab2359464ab9ad254a721e1aeb47) in parent group-v838542. [ 1051.097768] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Creating folder: Instances. Parent ref: group-v838781. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1051.098132] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08c04a29-4fd8-476c-87fe-f7c408b7e47a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.123055] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588d0052-5495-486e-a042-87f956b9c0b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.136042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4094307-9bd3-49ba-a5e9-2fd694cb1b39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.139478] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Created folder: Instances in parent group-v838781. [ 1051.139850] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.140155] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1051.141403] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80748417-60d6-4340-ba66-275b0445e21d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.180097] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba4bf27-b3c8-4e7b-a490-59a80b2ba3cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.186562] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.186562] env[61545]: value = "task-4256368" [ 1051.186562] env[61545]: _type = "Task" [ 1051.186562] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.201751] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] The volume has not been displaced from its original location: [datastore1] volume-705a0f51-10e1-4167-b382-baf0f7935774/volume-705a0f51-10e1-4167-b382-baf0f7935774.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1051.207507] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfiguring VM instance instance-0000003f to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1051.209143] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.209291] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.209430] env[61545]: DEBUG nova.network.neutron [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.209673] env[61545]: DEBUG nova.objects.instance [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'info_cache' on Instance uuid 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.211659] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3fbd0e0-e8cf-4058-975b-de2e4af3852f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.231204] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256368, 'name': CreateVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.237238] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1051.237238] env[61545]: value = "task-4256369" [ 1051.237238] env[61545]: _type = "Task" [ 1051.237238] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.247539] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256369, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.486094] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1051.486500] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256364, 'name': ReconfigVM_Task, 'duration_secs': 0.253607} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.486885] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e29dd039-d5f0-4af5-a435-00b9dc9dda8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.489530] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Reconfigured VM instance instance-0000004c to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1051.495394] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe6789b8-5926-434d-9324-17b22577cb29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.520430] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256365, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.523753] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1051.523753] env[61545]: value = "task-4256371" [ 1051.523753] env[61545]: _type = "Task" [ 1051.523753] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.524407] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1051.524407] env[61545]: value = "task-4256370" [ 1051.524407] env[61545]: _type = "Task" [ 1051.524407] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.540712] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256370, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.702190] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256368, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.754454] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256369, 'name': ReconfigVM_Task, 'duration_secs': 0.357582} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.754941] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Reconfigured VM instance instance-0000003f to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1051.760470] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c056e84d-02d4-4890-9d34-23e260ae30d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.778670] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1051.778670] env[61545]: value = "task-4256372" [ 1051.778670] env[61545]: _type = "Task" [ 1051.778670] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.784396] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.599s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.784758] env[61545]: DEBUG nova.compute.manager [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=61545) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1051.788205] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.497s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.790078] env[61545]: INFO nova.compute.claims [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.800392] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.903823] env[61545]: DEBUG nova.compute.manager [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Received event network-changed-5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1051.904820] env[61545]: DEBUG nova.compute.manager [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Refreshing instance network info cache due to event network-changed-5f4a8e1e-aa1b-4785-8467-54496ef2257b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1051.905391] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] Acquiring lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.905756] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] Acquired lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.906119] env[61545]: DEBUG nova.network.neutron [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Refreshing network info cache for port 5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.007738] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256365, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.038682] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256370, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.042740] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256371, 'name': ReconfigVM_Task, 'duration_secs': 0.219541} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.043028] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838731', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'name': 'volume-11641b07-7823-42c5-8e71-d45453cc6704', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b', 'attached_at': '', 'detached_at': '', 'volume_id': '11641b07-7823-42c5-8e71-d45453cc6704', 'serial': '11641b07-7823-42c5-8e71-d45453cc6704'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1052.043325] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.044193] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10315ed-7ba0-4306-b94f-791d5df37080 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.052924] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.053141] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2321a581-dd6c-4a77-9940-aafe9c580a5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.134443] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.138064] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 
tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.138064] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Deleting the datastore file [datastore2] 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.138064] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f310f8e-edfd-4ce6-9668-56d34104f226 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.147159] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for the task: (returnval){ [ 1052.147159] env[61545]: value = "task-4256374" [ 1052.147159] env[61545]: _type = "Task" [ 1052.147159] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.157301] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256374, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.202521] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256368, 'name': CreateVM_Task, 'duration_secs': 0.532419} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.202761] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.203659] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.203878] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.204323] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.204679] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a66109be-bc83-4383-a5d2-1ef2a6edb2a4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.211556] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1052.211556] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d4228-53fa-609c-79be-87cbcc47e613" [ 1052.211556] env[61545]: _type = "Task" [ 1052.211556] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.222738] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d4228-53fa-609c-79be-87cbcc47e613, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.294032] env[61545]: DEBUG oslo_vmware.api [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256372, 'name': ReconfigVM_Task, 'duration_secs': 0.188359} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.294235] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838749', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'name': 'volume-705a0f51-10e1-4167-b382-baf0f7935774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2bb4ea0-e9fb-4198-80fa-acfd25fb226d', 'attached_at': '', 'detached_at': '', 'volume_id': '705a0f51-10e1-4167-b382-baf0f7935774', 'serial': '705a0f51-10e1-4167-b382-baf0f7935774'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1052.379965] env[61545]: INFO nova.scheduler.client.report [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted allocation for migration e866c452-7774-4a83-9c60-8d591a4adec9 [ 1052.508550] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256365, 'name': Rename_Task, 'duration_secs': 1.176892} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.509431] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.510650] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df5892b8-cfb2-4eb1-a5f9-6a5e37de317b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.518309] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1052.518309] env[61545]: value = "task-4256375" [ 1052.518309] env[61545]: _type = "Task" [ 1052.518309] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.534163] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256375, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.542267] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256370, 'name': CreateSnapshot_Task, 'duration_secs': 1.006674} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.542536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1052.543390] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c843f6-1069-4d13-8234-6788036d6e48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.658968] env[61545]: DEBUG oslo_vmware.api [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Task: {'id': task-4256374, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151134} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.659285] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.659496] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.659683] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.659857] env[61545]: INFO nova.compute.manager [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Took 2.35 seconds to destroy the instance on the hypervisor. [ 1052.660425] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.660425] env[61545]: DEBUG nova.compute.manager [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.660425] env[61545]: DEBUG nova.network.neutron [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.728025] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d4228-53fa-609c-79be-87cbcc47e613, 'name': SearchDatastore_Task, 'duration_secs': 0.027555} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.728025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.728025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.728025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.728717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.729200] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.729688] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3553a83e-cac9-4483-b0ed-381af06c02aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.744221] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.744221] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.744221] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71e48c56-7e69-40f0-9c18-310396a26433 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.755110] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1052.755110] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b25aae-1d5f-901c-1192-a921b1da3e01" [ 1052.755110] env[61545]: _type = "Task" [ 1052.755110] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.768055] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b25aae-1d5f-901c-1192-a921b1da3e01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.771674] env[61545]: DEBUG nova.network.neutron [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [{"id": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "address": "fa:16:3e:1b:8a:41", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2f62a8a-ff", "ovs_interfaceid": "a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.855090] env[61545]: DEBUG nova.objects.instance [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.897249] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0deda3e0-786d-4667-8ec5-d1f9634773f9 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.490s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.970044] env[61545]: DEBUG nova.network.neutron [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updated VIF entry in instance network info cache for port 5f4a8e1e-aa1b-4785-8467-54496ef2257b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.970507] env[61545]: DEBUG nova.network.neutron [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updating instance_info_cache with network_info: [{"id": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "address": "fa:16:3e:61:3a:cd", "network": {"id": "eae53209-7628-446d-8fb2-132a6d9a4114", "bridge": "br-int", "label": "tempest-ServersTestJSON-1017663297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ce3ab2359464ab9ad254a721e1aeb47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4a8e1e-aa", "ovs_interfaceid": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.009810] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1053.012451] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54938211-b8ea-4c74-91fa-c53b5262e5dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.024427] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk is in state: ready. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1053.024604] env[61545]: ERROR oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk due to incomplete transfer. [ 1053.025265] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1b50cc96-2cbe-4041-81be-75749ec84a7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.032313] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256375, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.036030] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524d80e0-3f47-6ba8-c5e7-d8471969bb0e/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1053.036654] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Uploaded image 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1053.038797] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1053.039151] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3a211cc6-8d9e-4c1f-8a7c-fc698e45378a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.047097] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1053.047097] env[61545]: value = "task-4256376" [ 1053.047097] env[61545]: _type = "Task" [ 1053.047097] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.066126] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1053.066627] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256376, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.067315] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-402d19bb-8a82-49c8-a98e-2e6cf68a3a58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.077440] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1053.077440] env[61545]: value = "task-4256377" [ 1053.077440] env[61545]: _type = "Task" [ 1053.077440] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.089999] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.252735] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7014f36e-c51d-4a56-b0fa-0b0de9aaecd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.270720] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72aeeb6-9bed-4e7c-8f80-19dc2edda487 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.274882] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b25aae-1d5f-901c-1192-a921b1da3e01, 'name': SearchDatastore_Task, 'duration_secs': 0.014343} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.276352] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a494ec82-d0d2-4277-9e82-276cb9f47c30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.304806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-844f01ed-4dae-4e13-9d1c-09a73f413201" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.305100] env[61545]: DEBUG nova.objects.instance [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'migration_context' on Instance uuid 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.307623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2d73c-6d96-4960-a722-c06ec9816d22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.316569] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1053.316569] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52efc475-a5dc-ce36-8ba2-166990f16a13" [ 1053.316569] env[61545]: _type = "Task" [ 1053.316569] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.326912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5205bc-ec1e-4aba-81d2-e90cbb04cb42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.338704] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52efc475-a5dc-ce36-8ba2-166990f16a13, 'name': SearchDatastore_Task, 'duration_secs': 0.011323} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.339617] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.340022] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 91eeceeb-c11e-414b-8ae6-e68e927f1f1e/91eeceeb-c11e-414b-8ae6-e68e927f1f1e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.340206] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c252e19-4a54-4b0c-a7ad-db9168a95482 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.353598] env[61545]: DEBUG nova.compute.provider_tree [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.363045] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1053.363045] env[61545]: value = "task-4256378" [ 1053.363045] env[61545]: _type = "Task" [ 1053.363045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.379820] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256378, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.474777] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea6f9f4d-59ca-4a73-9237-cb165134fe8d req-95e38918-8391-45e3-86e6-a03857c726b6 service nova] Releasing lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.535604] env[61545]: DEBUG oslo_vmware.api [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256375, 'name': PowerOnVM_Task, 'duration_secs': 0.647166} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.536037] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.536828] env[61545]: INFO nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Took 11.67 seconds to spawn the instance on the hypervisor. [ 1053.536828] env[61545]: DEBUG nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.538192] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa99695-623d-4bc4-890e-ac7d5decea47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.581425] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256376, 'name': Destroy_Task, 'duration_secs': 0.373174} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.586938] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Destroyed the VM [ 1053.587322] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1053.588293] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-307a585d-4fe6-4ed7-a375-4c8c922d3ca2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.599533] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.604026] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1053.604026] env[61545]: value = "task-4256379" [ 1053.604026] env[61545]: _type = "Task" [ 1053.604026] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.619968] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256379, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.816739] env[61545]: DEBUG nova.objects.base [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Object Instance<844f01ed-4dae-4e13-9d1c-09a73f413201> lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1053.817875] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019d8bba-c78b-4726-b14a-e71fb68c6136 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.843590] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2f22fa-51e2-4afe-b024-9c4a384c9a7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.852776] env[61545]: DEBUG oslo_vmware.api [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1053.852776] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52faa146-05b1-3493-2a81-7154603085c7" [ 1053.852776] env[61545]: _type = "Task" [ 1053.852776] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.858249] env[61545]: DEBUG nova.scheduler.client.report [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.868076] env[61545]: DEBUG oslo_vmware.api [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52faa146-05b1-3493-2a81-7154603085c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008118} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.871869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.871869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bc27d7b7-5486-4dfb-bd31-0eca49563bc8 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.332s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.877894] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48455} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.879442] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 91eeceeb-c11e-414b-8ae6-e68e927f1f1e/91eeceeb-c11e-414b-8ae6-e68e927f1f1e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1053.879442] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1053.879442] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04e15e7e-5a28-496e-91b0-9987cf5743e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.887461] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1053.887461] env[61545]: value = "task-4256380" [ 1053.887461] env[61545]: _type = "Task" [ 1053.887461] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.899943] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256380, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.078031] env[61545]: INFO nova.compute.manager [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Took 29.79 seconds to build instance. [ 1054.091703] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.116326] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.116698] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.116929] env[61545]: DEBUG nova.compute.manager [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.117677] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256379, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.118687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624a7638-834e-4ba5-9d13-7920e2d55e7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.127196] env[61545]: DEBUG nova.compute.manager [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1054.128028] env[61545]: DEBUG nova.objects.instance [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.316246] env[61545]: DEBUG nova.compute.manager [req-dcd8fc09-bb79-4ec3-9cf2-ae7cc9bd4d97 req-d2ebd72c-3f77-434e-a78d-30197a82c087 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Received event network-vif-deleted-fab93887-61ce-41f0-a531-d540740b5acd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1054.316488] env[61545]: INFO nova.compute.manager [req-dcd8fc09-bb79-4ec3-9cf2-ae7cc9bd4d97 req-d2ebd72c-3f77-434e-a78d-30197a82c087 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Neutron deleted interface fab93887-61ce-41f0-a531-d540740b5acd; detaching it from the instance and deleting it from the info cache [ 1054.316692] env[61545]: DEBUG nova.network.neutron [req-dcd8fc09-bb79-4ec3-9cf2-ae7cc9bd4d97 req-d2ebd72c-3f77-434e-a78d-30197a82c087 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.363464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.364036] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1054.369749] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.228s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.371413] env[61545]: INFO nova.compute.claims [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.405493] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177085} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.408262] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1054.408262] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ca7402-0d1d-42c8-92cb-96e366de1f16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.432518] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 91eeceeb-c11e-414b-8ae6-e68e927f1f1e/91eeceeb-c11e-414b-8ae6-e68e927f1f1e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.437023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-269d069b-39f8-404b-aba1-d8da750d3289 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.458956] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1054.458956] env[61545]: value = "task-4256381" [ 1054.458956] env[61545]: _type = "Task" [ 1054.458956] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.469439] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256381, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.538202] env[61545]: DEBUG nova.network.neutron [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.584025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d20b7f5f-bc92-4013-954e-7d5d3f41151a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.306s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.595773] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.615912] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256379, 'name': RemoveSnapshot_Task, 'duration_secs': 0.579266} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.615912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1054.616365] env[61545]: DEBUG nova.compute.manager [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.616960] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f809b080-fc95-4b72-9ba8-6b7a1e7cbbef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.819605] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-008e1cb8-8c93-4670-9a1a-4c334d46bbd9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.830260] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed650a00-a634-41e7-886e-ea9c9467013a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.874530] env[61545]: DEBUG nova.compute.manager [req-dcd8fc09-bb79-4ec3-9cf2-ae7cc9bd4d97 req-d2ebd72c-3f77-434e-a78d-30197a82c087 service nova] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Detach interface failed, port_id=fab93887-61ce-41f0-a531-d540740b5acd, reason: Instance 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1054.881748] env[61545]: DEBUG nova.compute.utils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1054.889715] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1054.890083] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1054.900754] env[61545]: DEBUG nova.objects.instance [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.939438] env[61545]: DEBUG nova.policy [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82de8ada56cd46319fe4c7ecd4957abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da39b1ee6df640b89a9dab58e3380397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1054.970873] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256381, 'name': ReconfigVM_Task, 'duration_secs': 0.373799} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.971370] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 91eeceeb-c11e-414b-8ae6-e68e927f1f1e/91eeceeb-c11e-414b-8ae6-e68e927f1f1e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.971943] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2889e519-26d7-43d5-b344-e5a97a1ce91e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.982574] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1054.982574] env[61545]: value = "task-4256382" [ 1054.982574] env[61545]: _type = "Task" [ 1054.982574] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.993027] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256382, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.040351] env[61545]: INFO nova.compute.manager [-] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Took 2.38 seconds to deallocate network for instance. [ 1055.095105] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.133694] env[61545]: INFO nova.compute.manager [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Shelve offloading [ 1055.141974] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.142332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b94434a-4f4d-4da8-9b21-8b03fa6c10ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.150254] env[61545]: DEBUG oslo_vmware.api [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1055.150254] env[61545]: value = "task-4256383" [ 1055.150254] env[61545]: _type = "Task" [ 1055.150254] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.165280] env[61545]: DEBUG oslo_vmware.api [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.390533] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1055.409332] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Successfully created port: 5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.415225] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.415404] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.415575] env[61545]: DEBUG nova.network.neutron [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.415785] env[61545]: DEBUG nova.objects.instance [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'info_cache' on Instance uuid 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.499584] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256382, 'name': Rename_Task, 'duration_secs': 0.18234} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.499584] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.499584] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14e4927f-2f6e-4e72-a85c-2ebac6b27b42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.507398] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1055.507398] env[61545]: value = "task-4256384" [ 1055.507398] env[61545]: _type = "Task" [ 1055.507398] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.318385] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.326539] env[61545]: DEBUG nova.objects.base [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Object Instance<1be4da80-c9ee-424e-b4e3-bdd22eb0cd67> lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1056.326888] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.327106] env[61545]: WARNING oslo_vmware.common.loopingcall [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] task run outlasted interval by 0.319435 sec [ 1056.328905] env[61545]: INFO nova.compute.manager [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Took 1.29 seconds to detach 1 volumes for instance. 
[ 1056.331065] env[61545]: DEBUG nova.compute.manager [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Deleting volume: 11641b07-7823-42c5-8e71-d45453cc6704 {{(pid=61545) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1056.337265] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3c8adc6-df40-4923-811a-2b867d08923c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.356466] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256377, 'name': CloneVM_Task, 'duration_secs': 2.233283} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.363639] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Created linked-clone VM from snapshot [ 1056.363639] env[61545]: DEBUG oslo_vmware.api [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256384, 'name': PowerOnVM_Task, 'duration_secs': 0.537798} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.364029] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1056.364029] env[61545]: value = "task-4256385" [ 1056.364029] env[61545]: _type = "Task" [ 1056.364029] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.364029] env[61545]: DEBUG oslo_vmware.api [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256383, 'name': PowerOffVM_Task, 'duration_secs': 0.266888} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.368206] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf50d1a-cc07-4ac2-a8bf-4be9d6756770 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.371480] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.371480] env[61545]: INFO nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Took 9.23 seconds to spawn the instance on the hypervisor. 
[ 1056.372100] env[61545]: DEBUG nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.372470] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.372618] env[61545]: DEBUG nova.compute.manager [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.374881] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a3f615-3561-4d01-b155-5f0f189653a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.382361] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179b7d60-6a1d-4ac6-ae7b-a70e0f7c225a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.398039] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Uploading image f5b525de-d65d-4769-8dbe-dd1a78b8f257 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1056.408264] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1056.408264] env[61545]: DEBUG nova.compute.manager [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.413874] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30a4a63-5586-445e-8dbc-edd8e42baf0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.424016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.424016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 
tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.424201] env[61545]: DEBUG nova.network.neutron [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.443763] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1056.443763] env[61545]: value = "vm-838785" [ 1056.443763] env[61545]: _type = "VirtualMachine" [ 1056.443763] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1056.445749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a8c8f458-08fe-4330-9caa-4b6637fefea5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.453617] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lease: (returnval){ [ 1056.453617] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b29ca7-b181-f160-d0eb-db4771c85d12" [ 1056.453617] env[61545]: _type = "HttpNfcLease" [ 1056.453617] env[61545]: } obtained for exporting VM: (result){ [ 1056.453617] env[61545]: value = "vm-838785" [ 1056.453617] env[61545]: _type = "VirtualMachine" [ 1056.453617] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1056.454008] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the lease: (returnval){ [ 1056.454008] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b29ca7-b181-f160-d0eb-db4771c85d12" [ 1056.454008] env[61545]: _type = "HttpNfcLease" [ 1056.454008] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1056.472194] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1056.472194] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b29ca7-b181-f160-d0eb-db4771c85d12" [ 1056.472194] env[61545]: _type = "HttpNfcLease" [ 1056.472194] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1056.696348] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bc9aaf-df5e-42d5-90fe-174bb667ec8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.705963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a70c7c-e03a-4ab7-8d51-7e427da31693 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.743608] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27138b87-791a-4b67-a17b-a872c1d4f042 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.754096] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5936107-2ae9-46aa-ae80-feae23b856f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.769836] env[61545]: DEBUG nova.compute.provider_tree [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.836936] env[61545]: DEBUG nova.compute.manager [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.836936] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fce32c8-c712-46ba-a4bd-079f0c3b3390 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.847967] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1056.900658] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.900912] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.901082] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.902932] env[61545]: DEBUG 
nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.902932] env[61545]: DEBUG nova.virt.hardware [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.903836] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfbe2b6-ef93-49ec-a7ec-969406c954a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.913036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95078a11-798b-4db5-a87d-c857da50fc6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.926135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dcc2314-508d-4b04-bc44-41aa6f94f33b tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.809s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.928968] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.941993] env[61545]: INFO nova.compute.manager [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Took 24.54 seconds to build instance. [ 1056.968403] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1056.968403] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b29ca7-b181-f160-d0eb-db4771c85d12" [ 1056.968403] env[61545]: _type = "HttpNfcLease" [ 1056.968403] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1056.968716] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1056.968716] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b29ca7-b181-f160-d0eb-db4771c85d12" [ 1056.968716] env[61545]: _type = "HttpNfcLease" [ 1056.968716] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1056.970200] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcd0773-0a6f-40ad-bbdb-caa162212053 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.979871] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1056.980092] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1057.078763] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-60d58d77-03f8-41fd-b2ca-c3176fc0466f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.274133] env[61545]: DEBUG nova.scheduler.client.report [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.355105] env[61545]: INFO nova.compute.manager [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] instance snapshotting [ 1057.361932] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9914db93-c93e-4ccc-9790-bd37d431f88b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.368788] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "855904d8-7eb3-405d-9236-ab4ba9b33940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.369085] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock 
"855904d8-7eb3-405d-9236-ab4ba9b33940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.369355] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.369557] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.369724] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.388245] env[61545]: INFO nova.compute.manager [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Terminating instance [ 1057.391120] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc693f39-51b4-4242-ab44-29870a9276af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.448167] env[61545]: DEBUG oslo_concurrency.lockutils [None req-64146504-703f-462a-932d-577d6507810c tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.089s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.452801] env[61545]: DEBUG nova.objects.instance [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.782296] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.413s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.783989] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1057.789296] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.899s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.789916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.796274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.800s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.796274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.796274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.313s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.796687] env[61545]: DEBUG nova.objects.instance [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'pci_requests' on Instance uuid 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.848351] env[61545]: INFO nova.scheduler.client.report [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted allocations for instance 578ce929-99fd-47ae-8275-e4ac9abe8d49 [ 1057.852687] env[61545]: INFO nova.scheduler.client.report [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted allocations for instance f7a16153-2ef7-4be4-90a2-5ad6616203f8 [ 1057.898037] env[61545]: DEBUG nova.compute.manager [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Start destroying the instance 
on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1057.898037] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.899165] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd00414-3d18-4350-8ff8-d4f3f13cb0cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.906147] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1057.906930] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-421c9fbc-a4a5-41d1-9190-10f2b5340bbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.919938] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.919938] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6727caf2-7a19-42f2-a82e-2f19484a3920 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.920260] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1057.920260] env[61545]: value = "task-4256388" [ 1057.920260] env[61545]: _type = "Task" [ 1057.920260] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.928026] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1057.928026] env[61545]: value = "task-4256389" [ 1057.928026] env[61545]: _type = "Task" [ 1057.928026] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.936429] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256388, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.942160] env[61545]: DEBUG nova.network.neutron [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [{"id": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "address": "fa:16:3e:c5:11:ab", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd36c7f3a-2a", "ovs_interfaceid": "d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.949781] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256389, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.964153] env[61545]: DEBUG oslo_concurrency.lockutils [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.964153] env[61545]: DEBUG oslo_concurrency.lockutils [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.964153] env[61545]: DEBUG nova.network.neutron [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.965326] env[61545]: DEBUG nova.objects.instance [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'info_cache' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.045983] env[61545]: DEBUG nova.network.neutron [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.291235] env[61545]: DEBUG nova.compute.utils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1058.292771] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.292771] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.301969] env[61545]: DEBUG nova.objects.instance [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'numa_topology' on Instance uuid 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.304899] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Successfully updated port: 5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.375043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1dd47ce4-2932-41cf-bcc7-fe319fff821a tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "578ce929-99fd-47ae-8275-e4ac9abe8d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.966s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.376377] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a80214f-7f0c-4f4d-8c9f-735a77b60cff tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "f7a16153-2ef7-4be4-90a2-5ad6616203f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.211s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.431769] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256388, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.444228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.447079] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256389, 'name': PowerOffVM_Task, 'duration_secs': 0.246477} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.447079] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.447079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.447299] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de2ed6d4-df7c-4671-90b0-6f59ecf82742 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.469526] env[61545]: DEBUG nova.objects.base [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1058.527280] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.528557] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.529324] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleting the datastore file [datastore2] 855904d8-7eb3-405d-9236-ab4ba9b33940 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.529706] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0be3682a-e059-4496-b78e-711d25cc2c9b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.538502] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1058.538502] env[61545]: value = "task-4256391" [ 1058.538502] env[61545]: _type = "Task" [ 1058.538502] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.549636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.552424] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256391, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.725433] env[61545]: DEBUG nova.policy [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '113fc58985704b0b9e0a28be2f61cd68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9007a6e389c0467c8e2077309984eaab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.799495] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1058.806958] env[61545]: INFO nova.compute.claims [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1058.810660] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.811041] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.811381] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.832662] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1058.836145] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc37dde-a0d4-4dbc-ad3f-d986567bf461 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.844110] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1058.844304] env[61545]: ERROR oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk due to incomplete transfer. 
[ 1058.844577] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-542e0c42-4671-4c10-8230-a82e6db0996c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.854357] env[61545]: DEBUG oslo_vmware.rw_handles [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b79560-58b2-4f55-390c-22232cc03849/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1058.854970] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Uploaded image 5034b2e2-dbdc-4463-8a43-60cf580bf4f3 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1058.857778] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1058.857778] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-25127978-0ef6-48e2-8416-05f9dfdfd3b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.865755] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1058.865755] env[61545]: value = "task-4256392" [ 1058.865755] env[61545]: _type = "Task" [ 1058.865755] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.879220] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256392, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.905515] env[61545]: DEBUG nova.compute.manager [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Received event network-changed-5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1058.905714] env[61545]: DEBUG nova.compute.manager [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Refreshing instance network info cache due to event network-changed-5f4a8e1e-aa1b-4785-8467-54496ef2257b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1058.906092] env[61545]: DEBUG oslo_concurrency.lockutils [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] Acquiring lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.906636] env[61545]: DEBUG oslo_concurrency.lockutils [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] Acquired lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.906837] env[61545]: DEBUG nova.network.neutron [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Refreshing network info cache for port 5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.932204] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256388, 'name': CreateSnapshot_Task, 'duration_secs': 0.921762} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.932527] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1058.933623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca811f13-b48c-414f-9864-a1d3b6309d56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.049540] env[61545]: DEBUG oslo_vmware.api [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256391, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234852} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.049713] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.049959] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.050155] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.050353] env[61545]: INFO nova.compute.manager [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1059.050675] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.051273] env[61545]: DEBUG nova.compute.manager [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.051450] env[61545]: DEBUG nova.network.neutron [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.336885] env[61545]: DEBUG nova.compute.manager [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Received event network-vif-plugged-5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1059.338659] env[61545]: DEBUG oslo_concurrency.lockutils [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.339027] env[61545]: DEBUG oslo_concurrency.lockutils [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.339320] env[61545]: DEBUG oslo_concurrency.lockutils [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.339533] env[61545]: DEBUG nova.compute.manager [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] No waiting events found dispatching network-vif-plugged-5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1059.339709] env[61545]: WARNING nova.compute.manager [req-897d1f48-f8dd-40ae-9fa0-8469f8542fb0 req-5215d7dc-e767-4908-a0d7-6194ceb133df service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Received unexpected event network-vif-plugged-5a70c23b-fb28-4302-add1-42b019c5c4f7 for instance with vm_state building and task_state spawning. [ 1059.378423] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256392, 'name': Destroy_Task} progress is 33%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.457182] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1059.458429] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1059.459092] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-376345ab-b247-42dc-b86f-0997ad40c1e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.464117] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e8a0006-afe9-4de0-b2c7-40acbe9a3fbc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.467029] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1059.480348] env[61545]: DEBUG oslo_vmware.api [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1059.480348] env[61545]: value = "task-4256393" [ 1059.480348] env[61545]: _type = "Task" [ 1059.480348] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.481314] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1059.481314] env[61545]: value = "task-4256394" [ 1059.481314] env[61545]: _type = "Task" [ 1059.481314] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.497814] env[61545]: DEBUG oslo_vmware.api [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256393, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.502317] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256394, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.818092] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1059.865452] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.865452] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.865742] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.865861] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.866333] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.866526] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.866812] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.867707] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.867707] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.867707] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.867707] env[61545]: DEBUG nova.virt.hardware [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.869540] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcea14eb-1092-4855-8efd-f09f7e00b196 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.892255] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4006bf50-94cd-4ec4-8147-57030b8bed82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.898062] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256392, 'name': Destroy_Task, 'duration_secs': 0.793555} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.899853] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Destroyed the VM [ 1059.900592] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1059.905164] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fff844fc-c424-46ab-9190-92994fd613f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.933260] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1059.933260] env[61545]: value = "task-4256395" [ 1059.933260] env[61545]: _type = "Task" [ 1059.933260] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.958821] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256395, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.998744] env[61545]: DEBUG oslo_vmware.api [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256393, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.011561] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256394, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.064552] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.065503] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171b6321-b7bf-471c-9e71-4ec79711a1c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.078934] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.079359] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c338bfa9-34b7-4508-8bd0-c2159a503407 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.162902] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.162902] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.162902] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleting the datastore file [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.169877] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98906227-ad35-4fbf-ba9f-8a9286db6e01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.175735] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1060.175735] env[61545]: value = "task-4256397" [ 1060.175735] env[61545]: _type = "Task" [ 1060.175735] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.190753] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256397, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.323951] env[61545]: DEBUG nova.network.neutron [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [{"id": "989b3fc6-0843-488f-9af2-39bb487eb78a", "address": "fa:16:3e:40:56:28", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989b3fc6-08", "ovs_interfaceid": "989b3fc6-0843-488f-9af2-39bb487eb78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.372133] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d50653-6a3c-4a04-8a1e-065e56af4afb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.382622] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bd0925-89f4-4c74-8d74-8e157788cc9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.386772] env[61545]: DEBUG nova.network.neutron [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Updating instance_info_cache with network_info: [] {{(pid=61545) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.422876] env[61545]: DEBUG nova.network.neutron [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Updating instance_info_cache with network_info: [{"id": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "address": "fa:16:3e:03:f2:5c", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a70c23b-fb", "ovs_interfaceid": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.424963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d17a5e5-a349-4dd4-bd51-a66e82e91ed8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.445578] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e02c715-3cee-4af6-8724-152db5a13652 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.452543] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Successfully created port: 56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1060.469168] env[61545]: DEBUG nova.compute.provider_tree [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.475141] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256395, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.497741] env[61545]: DEBUG oslo_vmware.api [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256393, 'name': PowerOnVM_Task, 'duration_secs': 0.613893} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.498611] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1060.498855] env[61545]: DEBUG nova.compute.manager [None req-4d792df3-727e-4493-9f85-e22a4715a08f tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1060.500406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e7bb5f-8127-4f91-97b2-3aaacaf11f2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.508048] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256394, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.682715] env[61545]: DEBUG nova.network.neutron [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updated VIF entry in instance network info cache for port 5f4a8e1e-aa1b-4785-8467-54496ef2257b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.682715] env[61545]: DEBUG nova.network.neutron [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updating instance_info_cache with network_info: [{"id": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "address": "fa:16:3e:61:3a:cd", "network": {"id": "eae53209-7628-446d-8fb2-132a6d9a4114", "bridge": "br-int", "label": "tempest-ServersTestJSON-1017663297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ce3ab2359464ab9ad254a721e1aeb47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f4a8e1e-aa", "ovs_interfaceid": "5f4a8e1e-aa1b-4785-8467-54496ef2257b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.696322] env[61545]: DEBUG oslo_vmware.api [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256397, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280956} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.696653] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1060.696831] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1060.696995] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1060.768615] env[61545]: INFO nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted allocations for instance f9c9c447-e676-4143-b329-fb6d71bcd553 [ 1060.829387] env[61545]: DEBUG oslo_concurrency.lockutils [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "refresh_cache-c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.889655] env[61545]: INFO nova.compute.manager [-] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Took 1.84 seconds to deallocate network for instance. 
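The entries above follow oslo.vmware's task-polling pattern: each vCenter call (RemoveSnapshot_Task, PowerOnVM_Task, CloneVM_Task, DeleteDatastoreFile_Task, CreateVM_Task) returns a task reference, wait_for_task logs "Waiting for the task", _poll_task periodically logs "progress is N%", and completion is logged with a duration_secs value. The snippet below is only an illustrative sketch of that loop, not the oslo.vmware implementation; the get_task_info callable and the TaskInfo class are hypothetical stand-ins for the PropertyCollector reads the driver actually performs.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    """Hypothetical stand-in for the vim TaskInfo object the driver reads back."""
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int         # percent complete
    error: str | None = None

def wait_for_task(get_task_info, poll_interval: float = 0.5) -> TaskInfo:
    """Minimal sketch of the polling loop implied by the log entries above:
    poll until the task reaches a terminal state, reporting progress on the way."""
    start = time.monotonic()
    while True:
        info = get_task_info()                      # one property read per poll in the real driver
        if info.state == 'success':
            print(f"completed successfully in {time.monotonic() - start:.3f}s")
            return info
        if info.state == 'error':
            raise RuntimeError(info.error or 'task failed')
        print(f"progress is {info.progress}%")      # mirrors the '_poll_task ... progress is N%' lines
        time.sleep(poll_interval)

The interleaved progress lines for several task IDs in this excerpt come from separate request workers each running such a loop for their own task.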
[ 1060.931316] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.931796] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance network_info: |[{"id": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "address": "fa:16:3e:03:f2:5c", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a70c23b-fb", "ovs_interfaceid": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1060.932474] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:f2:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a70c23b-fb28-4302-add1-42b019c5c4f7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1060.942723] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1060.943691] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1060.947274] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68295e76-6603-4d72-b398-d6c2d0cfd6a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.972566] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256395, 'name': RemoveSnapshot_Task, 'duration_secs': 0.756942} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.974158] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1060.974455] env[61545]: DEBUG nova.compute.manager [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1060.974879] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1060.974879] env[61545]: value = "task-4256398" [ 1060.974879] env[61545]: _type = "Task" [ 1060.974879] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.977304] env[61545]: DEBUG nova.scheduler.client.report [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.979849] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba7091b-332a-4302-aebe-596e71a0eec8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.995945] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256398, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.008902] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256394, 'name': CloneVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.188237] env[61545]: DEBUG oslo_concurrency.lockutils [req-e1591440-4de7-4e62-b1a2-e0e1ff649195 req-fe07712e-e126-4031-9cca-cf530b646f4b service nova] Releasing lock "refresh_cache-91eeceeb-c11e-414b-8ae6-e68e927f1f1e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.273376] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.273724] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.273950] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.274479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.274479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.280350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.280350] env[61545]: INFO 
nova.compute.manager [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Terminating instance [ 1061.398529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.483986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.688s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.490741] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 7.620s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.500950] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256398, 'name': CreateVM_Task, 'duration_secs': 0.50174} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.500950] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.501566] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.501742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.502114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1061.502404] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ff59028-fd8f-46b3-a451-15edd6404819 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.510163] env[61545]: DEBUG oslo_vmware.api 
[None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256394, 'name': CloneVM_Task, 'duration_secs': 1.558785} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.510914] env[61545]: INFO nova.compute.manager [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Shelve offloading [ 1061.512571] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Created linked-clone VM from snapshot [ 1061.514501] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfcccb4-d03f-42fb-9eed-151518ac9404 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.521563] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1061.521563] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b12f6d-6ab5-b755-8425-c177396496e7" [ 1061.521563] env[61545]: _type = "Task" [ 1061.521563] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.528323] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Uploading image f0939709-3ee1-4707-9cf9-dc346e075998 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1061.533963] env[61545]: DEBUG nova.compute.manager [req-30cd47cb-60ba-42b3-a969-6beee766f13a req-9382f91b-1e5c-4461-bc48-cb1ded586353 service nova] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Received event network-vif-deleted-7e18e278-e525-407c-90fa-107184503c1c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1061.534636] env[61545]: INFO nova.network.neutron [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating port 7e7e6bd8-fac2-4516-af29-a249216acca6 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1061.543289] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b12f6d-6ab5-b755-8425-c177396496e7, 'name': SearchDatastore_Task, 'duration_secs': 0.01759} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.543537] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.544162] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.544162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.544162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.544377] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.544603] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e2e5563-c4bb-457b-be09-56f04185c833 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.547896] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1061.548898] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f4a2a4cd-c53a-4299-a2d7-05760665c62e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.557491] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1061.557491] env[61545]: value = "task-4256399" [ 1061.557491] env[61545]: _type = "Task" [ 1061.557491] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.558997] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.559583] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1061.563392] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47a5716d-f1e2-4265-bc23-d9e3934cf9da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.571210] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1061.571210] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5260c779-8f2a-80ec-00b6-5976bffd4c39" [ 1061.571210] env[61545]: _type = "Task" [ 1061.571210] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.575317] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256399, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.586516] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5260c779-8f2a-80ec-00b6-5976bffd4c39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.633517] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Received event network-changed-5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1061.633772] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Refreshing instance network info cache due to event network-changed-5a70c23b-fb28-4302-add1-42b019c5c4f7. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1061.633999] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Acquiring lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.634173] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Acquired lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.634355] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Refreshing network info cache for port 5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.783981] env[61545]: DEBUG nova.compute.manager [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1061.784327] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1061.785387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7196ddc-6fc1-4ea8-9ed7-294e75d658a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.795814] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.796201] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f6c91c6-a89a-4209-bfab-d4f559f6b2bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.798962] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.801025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.809273] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1061.809273] env[61545]: value = "task-4256400" [ 1061.809273] env[61545]: _type = "Task" [ 1061.809273] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.817599] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256400, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.837996] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.838469] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2914d3a-ea53-4869-adfc-2ab6e46c58d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.847276] env[61545]: DEBUG oslo_vmware.api [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1061.847276] env[61545]: value = "task-4256401" [ 1061.847276] env[61545]: _type = "Task" [ 1061.847276] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.860015] env[61545]: DEBUG oslo_vmware.api [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.025513] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.025513] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9302eb0e-5a8d-41f2-bb11-c253f6d8c30b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.039274] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1062.039274] env[61545]: value = "task-4256402" [ 1062.039274] env[61545]: _type = "Task" [ 1062.039274] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.099412] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1062.099806] env[61545]: DEBUG nova.compute.manager [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.102132] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c699879b-5648-4829-b96b-6bcbbb60b771 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.128119] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256399, 'name': Destroy_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.128672] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.128829] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.129014] env[61545]: DEBUG nova.network.neutron [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.131719] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5260c779-8f2a-80ec-00b6-5976bffd4c39, 'name': SearchDatastore_Task, 'duration_secs': 0.020614} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.133529] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a37fa75-88aa-4b4f-9bc4-6b9f1577958f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.148782] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1062.148782] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5211c6ba-13cb-da04-b28a-23f1b41269fc" [ 1062.148782] env[61545]: _type = "Task" [ 1062.148782] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.160977] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5211c6ba-13cb-da04-b28a-23f1b41269fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.307116] env[61545]: INFO nova.compute.manager [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Detaching volume dc427f98-25af-4605-aa76-6df488552e30 [ 1062.327451] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256400, 'name': PowerOffVM_Task, 'duration_secs': 0.275861} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.327859] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.328122] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.328468] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19f19740-370d-46e0-8af0-b8ccbd41e715 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.360181] env[61545]: DEBUG oslo_vmware.api [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256401, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.370718] env[61545]: INFO nova.virt.block_device [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Attempting to driver detach volume dc427f98-25af-4605-aa76-6df488552e30 from mountpoint /dev/sdb [ 1062.371129] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1062.371372] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838754', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'name': 'volume-dc427f98-25af-4605-aa76-6df488552e30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e5be92c-d727-4515-9e16-85ade2719455', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'serial': 'dc427f98-25af-4605-aa76-6df488552e30'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1062.372327] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2db605a-83b7-4d78-8762-e085400b2826 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.405830] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2f38e8-b9df-4baf-b5a0-97db0752b503 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.416254] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cc3410-9885-4b6d-baf7-9b7f353af9cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.448012] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50a950d-bcc8-4fd4-988e-5315472ae1b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.450881] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.451274] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.451479] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] 8d838d3b-32ad-4bb2-839e-6bd81c363447 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.452023] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fec6d48-75fa-49eb-93aa-abcebbc2a308 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.467874] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] The volume has not been displaced from its original location: [datastore1] volume-dc427f98-25af-4605-aa76-6df488552e30/volume-dc427f98-25af-4605-aa76-6df488552e30.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1062.473927] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfiguring VM instance instance-00000041 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1062.480967] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8682ef2e-5565-449c-9b28-b9effa9219d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.493982] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1062.493982] env[61545]: value = "task-4256404" [ 1062.493982] env[61545]: _type = "Task" [ 1062.493982] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.502242] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1062.502242] env[61545]: value = "task-4256405" [ 1062.502242] env[61545]: _type = "Task" [ 1062.502242] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.508630] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256404, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.520141] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256405, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.580572] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256399, 'name': Destroy_Task, 'duration_secs': 0.637853} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.580572] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Destroyed the VM [ 1062.580572] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1062.580572] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e41f46cd-2483-4490-82b3-c62ee8825496 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.588100] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1062.588100] env[61545]: value = "task-4256406" [ 1062.588100] env[61545]: _type = "Task" [ 1062.588100] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.601587] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256406, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.604599] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b06702-4575-4973-a981-4e48c8b5ea63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.615397] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baac2782-2c81-4873-955d-0462ef13bfb9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.664999] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5467476c-d129-4206-9b65-34fafe76bfe9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.682723] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5211c6ba-13cb-da04-b28a-23f1b41269fc, 'name': SearchDatastore_Task, 'duration_secs': 0.018048} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.684338] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fab1c3a-847a-40d9-b027-5a055c1788d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.689863] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.690180] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1062.690488] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a723425-3456-4ddc-a206-222b7530f0a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.706090] env[61545]: DEBUG nova.compute.provider_tree [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.712050] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1062.712050] env[61545]: value = "task-4256407" [ 1062.712050] env[61545]: _type = "Task" [ 1062.712050] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.720486] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.867995] env[61545]: DEBUG oslo_vmware.api [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256401, 'name': PowerOnVM_Task, 'duration_secs': 0.549558} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.868437] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.868676] env[61545]: DEBUG nova.compute.manager [None req-076ac262-4b2b-42be-ae28-235e0e0a85fe tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.869641] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a2786c-c78e-46ba-b81c-971d24141236 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.005784] env[61545]: DEBUG oslo_vmware.api [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260636} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.007404] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1063.007404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1063.007404] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1063.007404] env[61545]: INFO nova.compute.manager [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1063.007404] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1063.007404] env[61545]: DEBUG nova.compute.manager [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1063.007404] env[61545]: DEBUG nova.network.neutron [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1063.020477] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256405, 'name': ReconfigVM_Task, 'duration_secs': 0.321075} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.020477] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Reconfigured VM instance instance-00000041 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1063.026020] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8704bfd-95c2-4706-81fc-3f7ab3395d1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.044525] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1063.044525] env[61545]: value = "task-4256408" [ 1063.044525] env[61545]: _type = "Task" [ 1063.044525] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.059206] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256408, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.069056] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Updated VIF entry in instance network info cache for port 5a70c23b-fb28-4302-add1-42b019c5c4f7. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.070066] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Updating instance_info_cache with network_info: [{"id": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "address": "fa:16:3e:03:f2:5c", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a70c23b-fb", "ovs_interfaceid": "5a70c23b-fb28-4302-add1-42b019c5c4f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.106539] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256406, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.211372] env[61545]: DEBUG nova.scheduler.client.report [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.231759] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256407, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.544410] env[61545]: DEBUG nova.network.neutron [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updating instance_info_cache with network_info: [{"id": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "address": "fa:16:3e:28:6c:a3", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b10a96e-65", "ovs_interfaceid": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.558769] env[61545]: DEBUG oslo_vmware.api [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256408, 'name': ReconfigVM_Task, 'duration_secs': 0.235958} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.560161] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838754', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'name': 'volume-dc427f98-25af-4605-aa76-6df488552e30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e5be92c-d727-4515-9e16-85ade2719455', 'attached_at': '', 'detached_at': '', 'volume_id': 'dc427f98-25af-4605-aa76-6df488552e30', 'serial': 'dc427f98-25af-4605-aa76-6df488552e30'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1063.572157] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Releasing lock "refresh_cache-2f8567b1-7291-4705-8ef3-23547eb4860e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.572841] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-vif-unplugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1063.572841] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.572841] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.573245] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.573245] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] No waiting events found dispatching network-vif-unplugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.573366] env[61545]: WARNING nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received unexpected event 
network-vif-unplugged-2147b830-281d-4a24-90d1-22eccefc4c5c for instance with vm_state shelved_offloaded and task_state None. [ 1063.573481] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1063.573612] env[61545]: DEBUG nova.compute.manager [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing instance network info cache due to event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1063.573882] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.573882] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.574030] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.604665] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256406, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.611033] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Successfully updated port: 56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1063.621717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.622197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.622432] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.622722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.623117] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.625729] env[61545]: INFO nova.compute.manager [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Terminating instance [ 1063.730449] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.798612} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.732095] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.732095] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.732408] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a87891cd-902a-42d9-bcdf-4b11239e1d46 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.741074] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1063.741074] env[61545]: value = "task-4256409" [ 1063.741074] env[61545]: _type = "Task" [ 1063.741074] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.753694] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256409, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.895272] env[61545]: DEBUG nova.compute.manager [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Received event network-vif-plugged-56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1063.895619] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Acquiring lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.895836] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.896059] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.896286] env[61545]: DEBUG nova.compute.manager [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] No waiting events found dispatching network-vif-plugged-56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.896476] env[61545]: WARNING nova.compute.manager [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Received unexpected event network-vif-plugged-56c23893-2f0c-48e2-ad52-6dcdf0869d98 for instance with vm_state building and task_state spawning. [ 1063.896642] env[61545]: DEBUG nova.compute.manager [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Received event network-changed-56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1063.896794] env[61545]: DEBUG nova.compute.manager [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Refreshing instance network info cache due to event network-changed-56c23893-2f0c-48e2-ad52-6dcdf0869d98. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1063.896977] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Acquiring lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.897142] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Acquired lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.897272] env[61545]: DEBUG nova.network.neutron [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Refreshing network info cache for port 56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1064.046524] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.109824] env[61545]: DEBUG oslo_vmware.api [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256406, 'name': RemoveSnapshot_Task, 'duration_secs': 1.047136} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.110933] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1064.119024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.125730] env[61545]: DEBUG nova.objects.instance [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'flavor' on Instance uuid 1e5be92c-d727-4515-9e16-85ade2719455 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.130197] env[61545]: DEBUG nova.compute.manager [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.131980] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.131980] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a137458-f8f9-453e-9d35-04060667a206 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.144277] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.146123] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7b9e522-4d4a-4acb-847c-37258107dbc4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.155495] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1064.155495] env[61545]: value = "task-4256410" [ 1064.155495] env[61545]: _type = "Task" [ 1064.155495] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.168843] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256410, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.233327] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.743s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.240522] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.310s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.240522] env[61545]: DEBUG nova.objects.instance [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lazy-loading 'resources' on Instance uuid 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.255591] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256409, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109357} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.255591] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.256204] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0ca574-a07a-453e-9404-c1f8247c6f56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.287514] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.287969] env[61545]: DEBUG nova.network.neutron [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.290058] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d69bf2e8-88ee-4745-a065-746c6df353ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.311190] env[61545]: INFO nova.compute.manager [-] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Took 1.30 
seconds to deallocate network for instance. [ 1064.320246] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1064.320246] env[61545]: value = "task-4256411" [ 1064.320246] env[61545]: _type = "Task" [ 1064.320246] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.330338] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.532623] env[61545]: DEBUG nova.network.neutron [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1064.618633] env[61545]: WARNING nova.compute.manager [None req-1c55f61c-7a14-49fe-8edc-cff561698570 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Image not found during snapshot: nova.exception.ImageNotFound: Image f0939709-3ee1-4707-9cf9-dc346e075998 could not be found. [ 1064.666810] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256410, 'name': PowerOffVM_Task, 'duration_secs': 0.368398} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.667498] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.667751] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.668063] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f18c2627-5eda-41f2-b85e-f2f17b735488 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.737121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.737121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.737121] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleting the datastore file [datastore2] 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.737505] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6416102f-bf83-4e5b-99e8-e8858de8e608 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.747244] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1064.747244] env[61545]: value = "task-4256413" [ 1064.747244] env[61545]: _type = "Task" [ 1064.747244] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.764108] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256413, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.813317] env[61545]: INFO nova.scheduler.client.report [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted allocation for migration 4a8c88d2-213c-4f97-884a-8726cbc598fe [ 1064.818702] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.834792] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256411, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.936591] env[61545]: DEBUG nova.network.neutron [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.011180] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updated VIF entry in instance network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1065.011972] env[61545]: DEBUG nova.network.neutron [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2147b830-28", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.111427] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea6fd2e-abc4-4922-87db-8e1cb596fd62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.120348] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53e130a-90b0-4ebf-9014-9e773ba47c78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.155862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b1506dc8-8225-4ae2-a48e-ca21d00df9c0 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.356s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.158075] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02781b04-27bc-497b-a19b-48836ff5e178 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.167292] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a909bee1-52cc-4ff8-b9e9-56a4ba8df6d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.183923] env[61545]: DEBUG nova.compute.provider_tree [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.259744] env[61545]: DEBUG oslo_vmware.api [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256413, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.246974} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.260114] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.262931] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.263167] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.263365] env[61545]: INFO nova.compute.manager [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1065.263648] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.264201] env[61545]: DEBUG nova.compute.manager [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.264320] env[61545]: DEBUG nova.network.neutron [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.324545] env[61545]: DEBUG oslo_concurrency.lockutils [None req-440aed4c-85c5-4ed2-b8f2-4471b4a680a7 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 14.711s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.336642] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256411, 'name': ReconfigVM_Task, 'duration_secs': 0.958127} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.337235] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.338207] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eda6fcaf-9205-404c-9958-78520c097009 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.344487] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1065.344487] env[61545]: value = "task-4256414" [ 1065.344487] env[61545]: _type = "Task" [ 1065.344487] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.357036] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256414, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.441264] env[61545]: DEBUG oslo_concurrency.lockutils [req-493680df-d11d-4f7c-b601-fb4adde0f4aa req-eeb9bf90-978c-44b1-aa07-a59716cdd113 service nova] Releasing lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.441625] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.441874] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.518797] env[61545]: DEBUG oslo_concurrency.lockutils [req-a748044e-2d30-4b5e-8e0d-fb351d4af877 req-bcdc1911-bf3d-4092-b849-683fa2fad317 service nova] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.590019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1065.590753] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-64e03679-debc-4ea6-930a-c4bf1ae96590 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.599897] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.607232] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.610019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.610019] env[61545]: DEBUG nova.network.neutron [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1065.610019] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c14c2e7-e0d7-4b2a-954f-555d5c664384 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.685466] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.685945] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.685945] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] 8b9c45a7-3574-47c9-b46b-34eed554fdc8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.686422] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8660803c-4290-485c-9a26-7db7cb447494 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.689748] env[61545]: DEBUG nova.scheduler.client.report [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Inventory has not changed 
for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.702177] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1065.702177] env[61545]: value = "task-4256416" [ 1065.702177] env[61545]: _type = "Task" [ 1065.702177] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.717668] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256416, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.733292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.858726] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256414, 'name': Rename_Task, 'duration_secs': 0.208981} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.859034] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1065.859453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9f21fc9-fb97-4b3c-9261-d46278fcb75b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.867338] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1065.867338] env[61545]: value = "task-4256417" [ 1065.867338] env[61545]: _type = "Task" [ 1065.867338] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.876812] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256417, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.036449] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1066.197865] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.959s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.200419] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.923s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.200686] env[61545]: DEBUG nova.objects.instance [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'resources' on Instance uuid f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.213538] env[61545]: DEBUG oslo_vmware.api [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306857} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.215985] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.216205] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.216382] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.225411] env[61545]: INFO nova.scheduler.client.report [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Deleted allocations for instance 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b [ 1066.242518] env[61545]: INFO nova.scheduler.client.report [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance 8b9c45a7-3574-47c9-b46b-34eed554fdc8 [ 1066.325248] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1066.326928] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0f2862-6534-4d70-8075-8917290fa8f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.334343] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1066.334538] env[61545]: ERROR oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk due to incomplete transfer. 
[ 1066.334772] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-868d1e1d-f898-4650-a028-f1351fecbb34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.342657] env[61545]: DEBUG oslo_vmware.rw_handles [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5207200e-7bf3-5d19-ff31-c486cefbbd95/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1066.342859] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Uploaded image f5b525de-d65d-4769-8dbe-dd1a78b8f257 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1066.344907] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1066.345491] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ec7862a4-b8cb-412e-be9f-e0f3d3f1c270 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.352649] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1066.352649] env[61545]: value = "task-4256418" [ 1066.352649] env[61545]: _type = "Task" [ 1066.352649] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.362065] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256418, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.384312] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256417, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.503926] env[61545]: DEBUG nova.network.neutron [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Updating instance_info_cache with network_info: [{"id": "56c23893-2f0c-48e2-ad52-6dcdf0869d98", "address": "fa:16:3e:db:2c:8f", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c23893-2f", "ovs_interfaceid": "56c23893-2f0c-48e2-ad52-6dcdf0869d98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.541970] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Received event network-vif-deleted-71a8926c-7ea6-4f74-b990-81464c47f0c1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1066.542399] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1066.542596] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.542821] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.542954] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.543132] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] No waiting events found dispatching network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.543293] env[61545]: WARNING nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received unexpected event network-vif-plugged-7e7e6bd8-fac2-4516-af29-a249216acca6 for instance with vm_state shelved_offloaded and task_state spawning. [ 1066.543531] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received event network-vif-unplugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1066.543776] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.543776] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.543925] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.544103] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] No waiting events found dispatching network-vif-unplugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.544270] env[61545]: WARNING nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received unexpected event network-vif-unplugged-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 for instance with vm_state shelved_offloaded and task_state None. 
[ 1066.544429] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Received event network-changed-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1066.544580] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Refreshing instance network info cache due to event network-changed-6b10a96e-6545-4419-9cd0-afa5ce3e2cc2. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1066.544762] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquiring lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.544896] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquired lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.545064] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Refreshing network info cache for port 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1066.702914] env[61545]: DEBUG nova.objects.instance [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'numa_topology' on Instance uuid f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.716504] env[61545]: DEBUG nova.network.neutron [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.734167] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5df2ad2e-46a2-4067-9dfe-0b3554a64dfe tempest-ServersTestBootFromVolume-2047470289 tempest-ServersTestBootFromVolume-2047470289-project-member] Lock "04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.940s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.749482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.863793] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256418, 'name': Destroy_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.880643] env[61545]: DEBUG oslo_vmware.api [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256417, 'name': PowerOnVM_Task, 'duration_secs': 0.629704} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.881301] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1066.881577] env[61545]: INFO nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Took 10.03 seconds to spawn the instance on the hypervisor. 
[ 1066.881768] env[61545]: DEBUG nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.882621] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821e9e50-c0e3-4432-a2aa-cb737c3899b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.007473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "b33e29cc-fe26-429a-8799-8d790667cc1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.007789] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.008014] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.008212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.008491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.010863] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "refresh_cache-db2d0e21-f6bb-4f61-8d54-e9191de13a59" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.011168] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 
tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance network_info: |[{"id": "56c23893-2f0c-48e2-ad52-6dcdf0869d98", "address": "fa:16:3e:db:2c:8f", "network": {"id": "33c2f50c-3696-413b-a692-979d6f654147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-25769496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9007a6e389c0467c8e2077309984eaab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c23893-2f", "ovs_interfaceid": "56c23893-2f0c-48e2-ad52-6dcdf0869d98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1067.011630] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:2c:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56c23893-2f0c-48e2-ad52-6dcdf0869d98', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.019882] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.019954] env[61545]: INFO nova.compute.manager [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Terminating instance [ 1067.021470] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.021880] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-959d2f7a-7944-4e73-bbce-3f4c80e53e56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.043787] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.043787] env[61545]: value = "task-4256419" [ 1067.043787] env[61545]: _type = "Task" [ 1067.043787] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.053493] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256419, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.075501] env[61545]: DEBUG nova.network.neutron [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.092051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.092340] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.092553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.092857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.092996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.095273] env[61545]: INFO nova.compute.manager [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Terminating instance [ 1067.184793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.184793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.184793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "1e5be92c-d727-4515-9e16-85ade2719455-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.184793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.185247] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.187511] env[61545]: INFO nova.compute.manager [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Terminating instance [ 1067.205397] env[61545]: DEBUG nova.objects.base [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1067.219910] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.261552] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='97a77112f5b2a22ccf7df2f8b2cd9c2d',container_format='bare',created_at=2025-06-03T12:52:37Z,direct_url=,disk_format='vmdk',id=5bc7010a-ee45-48d1-87c9-e47216be1200,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1834208554-shelved',owner='50b8a600a38442278d0cf036919f87c2',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-06-03T12:53:01Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1067.262236] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.262236] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1067.262369] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.262531] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1067.262716] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1067.263118] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1067.263252] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1067.263567] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1067.263869] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1067.264073] env[61545]: DEBUG nova.virt.hardware [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1067.265138] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8780b9c2-2399-45b1-9dd5-ca4d43b35162 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.280030] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc710d4-496a-4c89-ac3a-56c1522d96e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.301825] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:a3:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e7e6bd8-fac2-4516-af29-a249216acca6', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.311327] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.313242] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.313484] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-029b7acd-c7e6-4030-937d-22e6372d730e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.341747] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.341747] env[61545]: value = "task-4256420" [ 1067.341747] env[61545]: _type = "Task" [ 1067.341747] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.355323] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256420, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.366542] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256418, 'name': Destroy_Task, 'duration_secs': 0.836664} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.366883] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Destroyed the VM [ 1067.367143] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1067.367409] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5ebaac1d-1cd1-4d52-9a91-dfb7fb129e32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.377532] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1067.377532] env[61545]: value = "task-4256421" [ 1067.377532] env[61545]: _type = "Task" [ 1067.377532] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.389615] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256421, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.402925] env[61545]: INFO nova.compute.manager [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Took 30.13 seconds to build instance. [ 1067.458983] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updated VIF entry in instance network info cache for port 6b10a96e-6545-4419-9cd0-afa5ce3e2cc2. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1067.459388] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updating instance_info_cache with network_info: [{"id": "6b10a96e-6545-4419-9cd0-afa5ce3e2cc2", "address": "fa:16:3e:28:6c:a3", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": null, "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap6b10a96e-65", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.540045] env[61545]: DEBUG nova.compute.manager [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1067.540045] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1067.540798] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae055be9-8552-4502-ac2a-46115c9d7984 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.552738] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.553397] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f7c61a4-e97b-42e9-bacf-f889b7ea9af2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.558493] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256419, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.560642] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1067.560642] env[61545]: value = "task-4256422" [ 1067.560642] env[61545]: _type = "Task" [ 1067.560642] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.561347] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a52903-7637-4d02-bed1-68aa36d6d48d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.572768] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.575429] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0dccd8-1ed0-425e-a94a-3437fd59098b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.579232] env[61545]: INFO nova.compute.manager [-] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Took 2.31 seconds to deallocate network for instance. [ 1067.628936] env[61545]: DEBUG nova.compute.manager [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1067.629231] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1067.631867] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d3d18c-b701-451e-a163-c170dc74b5ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.635280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51bda11-c2d6-45c3-8a98-8ec78362ede1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.652068] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97588d7b-52f9-4470-99a2-028fc83304a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.664101] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.668540] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c32d762e-5834-4ccd-bb05-7b0b2bc6fed5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.696660] env[61545]: DEBUG nova.compute.manager [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1067.702023] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1067.702023] env[61545]: DEBUG nova.compute.provider_tree [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1067.702023] env[61545]: DEBUG oslo_vmware.api [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1067.702023] env[61545]: value = "task-4256423" [ 1067.702023] env[61545]: _type = "Task" [ 1067.702023] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.708344] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf5c5af-1c6d-4f08-ad3d-1740aa161e7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.739082] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.739082] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54f947bb-9c2c-45f5-992a-b04ee6f26fb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.757488] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1067.757488] env[61545]: value = "task-4256424" [ 1067.757488] env[61545]: _type = "Task" [ 1067.757488] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.768858] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256424, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.856523] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256420, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.887667] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256421, 'name': RemoveSnapshot_Task} progress is 43%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.904422] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5e0aafa4-0b65-438f-ac22-bf8e5aa45459 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.652s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.964152] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Releasing lock "refresh_cache-8b9c45a7-3574-47c9-b46b-34eed554fdc8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.964514] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1067.964726] env[61545]: DEBUG nova.compute.manager [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing instance network info cache due to event network-changed-7e7e6bd8-fac2-4516-af29-a249216acca6. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1067.964995] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquiring lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.965290] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Acquired lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.965555] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Refreshing network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.056980] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256419, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.074413] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256422, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.133685] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.142447] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.224161] env[61545]: DEBUG oslo_vmware.api [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256423, 'name': PowerOffVM_Task, 'duration_secs': 0.471675} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.224472] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1068.224719] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.225064] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c5451c7-ffc9-4200-b81e-9883b400bacd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.238247] env[61545]: ERROR nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [req-29a7998e-4b49-4aed-807b-1f50f1eb5713] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-29a7998e-4b49-4aed-807b-1f50f1eb5713"}]} [ 1068.255757] env[61545]: DEBUG nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1068.269353] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256424, 'name': PowerOffVM_Task, 'duration_secs': 0.394985} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.269711] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1068.269811] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.270090] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69c299b7-eb03-4094-b595-b5763032d467 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.276060] env[61545]: DEBUG nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1068.276385] env[61545]: DEBUG nova.compute.provider_tree [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 244, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.289543] env[61545]: DEBUG nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1068.307189] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.308035] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.308035] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleting the datastore file [datastore2] 844f01ed-4dae-4e13-9d1c-09a73f413201 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.309027] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0f8c575-63dd-46fc-9fc7-aeeb6c17878c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.315259] env[61545]: DEBUG nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1068.327744] env[61545]: DEBUG oslo_vmware.api [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1068.327744] env[61545]: value = "task-4256427" [ 1068.327744] env[61545]: _type = "Task" [ 1068.327744] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.341447] env[61545]: DEBUG oslo_vmware.api [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.359659] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256420, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.361438] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.361740] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.361973] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleting the datastore file [datastore2] 1e5be92c-d727-4515-9e16-85ade2719455 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.362341] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f988e67-818e-40fe-9dfa-51f4a0b92d99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.373136] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1068.373136] env[61545]: value = "task-4256428" [ 1068.373136] env[61545]: _type = "Task" [ 1068.373136] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.392801] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256428, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.396780] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256421, 'name': RemoveSnapshot_Task} progress is 98%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.560872] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256419, 'name': CreateVM_Task, 'duration_secs': 1.410218} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.561085] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.561864] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.562061] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.562426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.562704] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7004854c-747e-42f1-8ea7-73b37b61e0b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.574936] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1068.574936] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c04cd3-83d3-d1da-35b6-41e4881f1b52" [ 1068.574936] env[61545]: _type = "Task" [ 1068.574936] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.584847] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.592540] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c04cd3-83d3-d1da-35b6-41e4881f1b52, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.670013] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec07139-8fb5-4fbb-a5d6-91d50cbd1d1a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.679318] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046be4be-7610-4110-9696-3fc4e7cc47f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.717626] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd1f06a-2a00-488c-8888-3866d271f685 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.726612] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f689cdf-e581-4cba-8cbf-d0580b9a564d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.741580] env[61545]: DEBUG nova.compute.provider_tree [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1068.750737] env[61545]: DEBUG nova.compute.manager [req-a6ffd310-e788-4885-a570-59b22c091354 req-8e721c15-2029-4d7f-8840-62e672a94a13 service nova] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Received event network-vif-deleted-d36c7f3a-2a3b-44a8-bfe6-1af85f921b3f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1068.803107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.803472] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.803735] env[61545]: DEBUG nova.compute.manager [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.805024] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f12a829-5e48-440b-b4a4-d2b3bf588947 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.815496] env[61545]: DEBUG nova.compute.manager [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1068.816499] env[61545]: DEBUG nova.objects.instance [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'flavor' on Instance uuid 2f8567b1-7291-4705-8ef3-23547eb4860e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.842241] env[61545]: DEBUG oslo_vmware.api [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295929} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.846068] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.846068] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.846068] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.846068] env[61545]: INFO nova.compute.manager [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1068.846068] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.846068] env[61545]: DEBUG nova.compute.manager [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1068.846068] env[61545]: DEBUG nova.network.neutron [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1068.856232] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256420, 'name': CreateVM_Task, 'duration_secs': 1.377856} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.856409] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.857087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.857249] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.857643] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.857903] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae8a05ae-e17a-4e0b-912d-3d3a29ce516a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.863520] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1068.863520] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525212d7-77db-cc12-4ab3-1dfde04e4d21" [ 1068.863520] env[61545]: _type = "Task" [ 1068.863520] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.872485] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525212d7-77db-cc12-4ab3-1dfde04e4d21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.885156] env[61545]: DEBUG oslo_vmware.api [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256428, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351386} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.888444] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.888663] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.888844] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.889022] env[61545]: INFO nova.compute.manager [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1068.889265] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.889502] env[61545]: DEBUG nova.compute.manager [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1068.889590] env[61545]: DEBUG nova.network.neutron [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1068.896861] env[61545]: DEBUG oslo_vmware.api [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256421, 'name': RemoveSnapshot_Task, 'duration_secs': 1.204983} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.897121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1068.897351] env[61545]: INFO nova.compute.manager [None req-94286e92-3789-4e3b-ad61-6dfb708f6878 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 17.98 seconds to snapshot the instance on the hypervisor. [ 1069.076610] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256422, 'name': PowerOffVM_Task, 'duration_secs': 1.139602} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.081354] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1069.081540] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.081797] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d02f054-5803-4907-ae77-f1edb6343f06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.090918] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c04cd3-83d3-d1da-35b6-41e4881f1b52, 'name': SearchDatastore_Task, 'duration_secs': 0.034946} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.091259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.091574] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.091836] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.091988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.092189] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.092806] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbf831db-3adf-48be-9f3d-184df47ef475 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.102692] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.102928] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.103680] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca46e47-af86-4cdf-9e1c-a39461654cb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.110319] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1069.110319] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e8ef69-9342-7e25-b784-1d2ae9a2b085" [ 1069.110319] env[61545]: _type = "Task" [ 1069.110319] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.119414] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e8ef69-9342-7e25-b784-1d2ae9a2b085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.149657] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.149989] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.150207] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleting the datastore file [datastore2] b33e29cc-fe26-429a-8799-8d790667cc1d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.150503] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa0f0f06-0e89-4eba-81c1-b5266c392e78 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.158033] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1069.158033] env[61545]: value = "task-4256430" [ 1069.158033] env[61545]: _type = "Task" [ 1069.158033] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.170324] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256430, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.286087] env[61545]: DEBUG nova.scheduler.client.report [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 116 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1069.286434] env[61545]: DEBUG nova.compute.provider_tree [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 116 to 117 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1069.286651] env[61545]: DEBUG nova.compute.provider_tree [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1069.376598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.376899] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Processing image 5bc7010a-ee45-48d1-87c9-e47216be1200 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.377190] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.377355] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.377547] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.377840] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b84553b-d1b7-4cc2-bbfd-a9e98d3acd7e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.387688] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.387947] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.388809] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb38cc4-eb96-402e-b8da-c9a2311d3432 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.395096] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1069.395096] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52875efe-52f6-ff11-9055-da9331f17365" [ 1069.395096] env[61545]: _type = "Task" [ 1069.395096] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.409496] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52875efe-52f6-ff11-9055-da9331f17365, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.608176] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updated VIF entry in instance network info cache for port 7e7e6bd8-fac2-4516-af29-a249216acca6. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.608652] env[61545]: DEBUG nova.network.neutron [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [{"id": "7e7e6bd8-fac2-4516-af29-a249216acca6", "address": "fa:16:3e:b8:a3:67", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e7e6bd8-fa", "ovs_interfaceid": "7e7e6bd8-fac2-4516-af29-a249216acca6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.622210] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e8ef69-9342-7e25-b784-1d2ae9a2b085, 'name': SearchDatastore_Task, 'duration_secs': 0.017199} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.623821] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d161b7-d7c4-453c-9e94-4905a1e16124 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.631145] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1069.631145] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523db906-536c-4c9e-800d-a00b4ca1a683" [ 1069.631145] env[61545]: _type = "Task" [ 1069.631145] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.640121] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523db906-536c-4c9e-800d-a00b4ca1a683, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.671990] env[61545]: DEBUG oslo_vmware.api [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372912} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.672265] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.672447] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.672620] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.672790] env[61545]: INFO nova.compute.manager [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Took 2.13 seconds to destroy the instance on the hypervisor. [ 1069.673068] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1069.673460] env[61545]: DEBUG nova.compute.manager [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1069.673460] env[61545]: DEBUG nova.network.neutron [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1069.792368] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.592s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.794757] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.396s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.795131] env[61545]: DEBUG nova.objects.instance [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'resources' on Instance uuid 855904d8-7eb3-405d-9236-ab4ba9b33940 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.824292] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.824959] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53c4afdf-316d-4747-9611-9266240267c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.833464] env[61545]: DEBUG oslo_vmware.api [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1069.833464] env[61545]: value = "task-4256431" [ 1069.833464] env[61545]: _type = "Task" [ 1069.833464] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.848024] env[61545]: DEBUG oslo_vmware.api [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256431, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.927025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1069.927025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Fetch image to [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d/OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1069.927025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Downloading stream optimized image 5bc7010a-ee45-48d1-87c9-e47216be1200 to [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d/OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d.vmdk on the data store datastore2 as vApp {{(pid=61545) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1069.927025] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Downloading image file data 5bc7010a-ee45-48d1-87c9-e47216be1200 to the ESX as VM named 'OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d' {{(pid=61545) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1070.040229] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1070.040229] env[61545]: value = "resgroup-9" [ 1070.040229] env[61545]: _type = "ResourcePool" [ 1070.040229] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1070.041440] env[61545]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c73e046c-b527-43fb-8656-eec98fc2dd3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.066368] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease: (returnval){ [ 1070.066368] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1070.066368] env[61545]: _type = "HttpNfcLease" [ 1070.066368] env[61545]: } obtained for vApp import into resource pool (val){ [ 1070.066368] env[61545]: value = "resgroup-9" [ 1070.066368] env[61545]: _type = "ResourcePool" [ 1070.066368] env[61545]: }. 
{{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1070.068159] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the lease: (returnval){ [ 1070.068159] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1070.068159] env[61545]: _type = "HttpNfcLease" [ 1070.068159] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1070.075858] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.075858] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1070.075858] env[61545]: _type = "HttpNfcLease" [ 1070.075858] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1070.117035] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ea11e2c-cd44-49d1-b0f3-4b46f2641e3f req-671a7240-f146-4644-8102-4ade828b451d service nova] Releasing lock "refresh_cache-4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.146370] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523db906-536c-4c9e-800d-a00b4ca1a683, 'name': SearchDatastore_Task, 'duration_secs': 0.039165} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.146370] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.146370] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] db2d0e21-f6bb-4f61-8d54-e9191de13a59/db2d0e21-f6bb-4f61-8d54-e9191de13a59.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.148023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68ced60c-dd1b-4ced-9719-619cc0fa9e87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.158018] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1070.158018] env[61545]: value = "task-4256433" [ 1070.158018] env[61545]: _type = "Task" [ 1070.158018] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.172468] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.305694] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4dfe9755-61d5-4ef3-8a7e-d440f833e22d tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 32.718s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.308241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.574s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.308241] env[61545]: INFO nova.compute.manager [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Unshelving [ 1070.353858] env[61545]: DEBUG oslo_vmware.api [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256431, 'name': PowerOffVM_Task, 'duration_secs': 0.197111} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.354248] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.354510] env[61545]: DEBUG nova.compute.manager [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.355482] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a646454-a064-4ea3-86e1-f9cd4b9a3822 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.581051] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.581051] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1070.581051] env[61545]: _type = "HttpNfcLease" [ 1070.581051] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1070.680437] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256433, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.740228] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04280799-3492-4e14-bddb-e611ddd0be20 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.750544] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06517466-9bff-42c7-b46e-68e9146455b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.797706] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e35990a-58c7-4ef7-b694-ca492834b291 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.806428] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d63b51d-3bf4-4be9-b336-9e2f61989bd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.826821] env[61545]: DEBUG nova.compute.provider_tree [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.876068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ca566dc0-01fa-4098-bd13-b358ea5ddf2b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.080340] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1071.080340] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1071.080340] env[61545]: _type = "HttpNfcLease" [ 1071.080340] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1071.080657] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1071.080657] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5c460-fcea-fae9-8808-d81505756da2" [ 1071.080657] env[61545]: _type = "HttpNfcLease" [ 1071.080657] env[61545]: }. 
{{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1071.081474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edbc7d8-db62-450c-8154-5853393393af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.093830] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1071.094063] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk. {{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1071.164053] env[61545]: DEBUG nova.compute.manager [req-63d788eb-94ea-45c0-a2c5-db0cb1232583 req-a6179403-a247-4c64-8b98-a682fe7daef3 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Received event network-vif-deleted-b8a4b8ef-6552-4028-a715-5b4022737ea0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1071.164343] env[61545]: INFO nova.compute.manager [req-63d788eb-94ea-45c0-a2c5-db0cb1232583 req-a6179403-a247-4c64-8b98-a682fe7daef3 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Neutron deleted interface b8a4b8ef-6552-4028-a715-5b4022737ea0; detaching it from the instance and deleting it from the info cache [ 1071.165021] env[61545]: DEBUG nova.network.neutron [req-63d788eb-94ea-45c0-a2c5-db0cb1232583 req-a6179403-a247-4c64-8b98-a682fe7daef3 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.177156] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-db6a65c8-1c49-46a6-8540-a5a5b8ff0f13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.179249] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556278} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.181096] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] db2d0e21-f6bb-4f61-8d54-e9191de13a59/db2d0e21-f6bb-4f61-8d54-e9191de13a59.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.181337] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.181977] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4997f6c5-76f7-4a2c-94a1-567f7cf69dab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.187916] env[61545]: DEBUG nova.network.neutron [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.194025] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1071.194025] env[61545]: value = "task-4256434" [ 1071.194025] env[61545]: _type = "Task" [ 1071.194025] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.205430] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256434, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.332354] env[61545]: DEBUG nova.scheduler.client.report [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.356419] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.433599] env[61545]: DEBUG nova.network.neutron [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.468641] env[61545]: DEBUG nova.network.neutron [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.669445] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08b6d53c-ccfa-45fe-9d5e-f6e0af211e23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.682869] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f4b746-7e7f-49ef-86f5-8c8e270f086f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.697750] env[61545]: INFO nova.compute.manager [-] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Took 2.02 seconds to deallocate network for instance. [ 1071.718312] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092958} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.737286] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1071.744217] env[61545]: DEBUG nova.compute.manager [req-63d788eb-94ea-45c0-a2c5-db0cb1232583 req-a6179403-a247-4c64-8b98-a682fe7daef3 service nova] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Detach interface failed, port_id=b8a4b8ef-6552-4028-a715-5b4022737ea0, reason: Instance b33e29cc-fe26-429a-8799-8d790667cc1d could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1071.746969] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7840c840-0170-4563-ab24-3f905104eed1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.778405] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] db2d0e21-f6bb-4f61-8d54-e9191de13a59/db2d0e21-f6bb-4f61-8d54-e9191de13a59.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.785426] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e94e94d4-ec41-410f-9fb4-3acdb29ca8d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.811691] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1071.811691] env[61545]: value = "task-4256435" [ 1071.811691] env[61545]: _type = "Task" [ 1071.811691] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.824528] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "7301c541-664f-43ec-8a34-86f38cac22ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.824847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.825149] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.825352] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.825525] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 
tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.827344] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256435, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.828224] env[61545]: INFO nova.compute.manager [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Terminating instance [ 1071.841350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.845727] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.027s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.846606] env[61545]: DEBUG nova.objects.instance [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lazy-loading 'resources' on Instance uuid 8d838d3b-32ad-4bb2-839e-6bd81c363447 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.871977] env[61545]: INFO nova.scheduler.client.report [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted allocations for instance 855904d8-7eb3-405d-9236-ab4ba9b33940 [ 1071.937968] env[61545]: INFO nova.compute.manager [-] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Took 3.09 seconds to deallocate network for instance. [ 1071.974319] env[61545]: INFO nova.compute.manager [-] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Took 3.08 seconds to deallocate network for instance. 
[ 1072.219863] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.247068] env[61545]: INFO nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Rebuilding instance [ 1072.320218] env[61545]: DEBUG nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.321225] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1c96e9-2001-4438-8024-7b91f22acc10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.340026] env[61545]: DEBUG nova.compute.manager [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1072.340026] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1072.340026] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256435, 'name': ReconfigVM_Task, 'duration_secs': 0.419225} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.340026] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee81439-6261-4d18-9204-41dadaa2cd6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.343128] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Reconfigured VM instance instance-00000056 to attach disk [datastore2] db2d0e21-f6bb-4f61-8d54-e9191de13a59/db2d0e21-f6bb-4f61-8d54-e9191de13a59.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.343798] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d6e88e1-4808-4ed2-8e03-541048a70776 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.356034] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.356034] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dbb94dc-3239-4d4a-acf6-b255bcac3f03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.356635] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1072.356635] env[61545]: value = "task-4256436" [ 1072.356635] env[61545]: _type = "Task" [ 1072.356635] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.365172] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1072.365172] env[61545]: value = "task-4256437" [ 1072.365172] env[61545]: _type = "Task" [ 1072.365172] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.379895] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256436, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.381406] env[61545]: DEBUG oslo_concurrency.lockutils [None req-01a93ec0-9ada-4dd7-8866-2dbe2aa5eee5 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "855904d8-7eb3-405d-9236-ab4ba9b33940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.012s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.392740] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.444924] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.445606] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Completed reading data from the image iterator. {{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1072.445810] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1072.446816] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06db4bc-0b28-4ee8-a3d8-4a561a132b0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.455145] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1072.455376] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1072.455829] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-dda45d3f-d0e6-4a55-953d-a739c4929f40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.485910] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.685211] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f6149-f645-4867-c406-a25d0225f8f5/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1072.685457] env[61545]: INFO nova.virt.vmwareapi.images [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Downloaded image file data 5bc7010a-ee45-48d1-87c9-e47216be1200 [ 1072.686688] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434f7ee8-9141-45b3-9e6a-f55424d49a45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.705967] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4fbee95-fdae-449c-8825-2d3881a205fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.708572] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9162a7-abc0-46fa-bf82-4a3a167b34c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.718907] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f902e7b-643f-464b-9849-7b1fa66c2cbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.757274] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec5c614-d78c-4731-8b7a-191f7285f117 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.760317] env[61545]: INFO nova.virt.vmwareapi.images [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] The imported VM was unregistered [ 1072.762738] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Caching image {{(pid=61545) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1072.763063] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.763235] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0d706e9-6cee-4c2b-b734-7bf5ae342567 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.771517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febad56e-05dd-444a-b67a-36c15d299403 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.776853] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.777102] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d/OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d.vmdk to [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk. {{(pid=61545) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1072.777816] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bc24dac6-52fb-489f-bdb7-46e9e71d4dbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.788526] env[61545]: DEBUG nova.compute.provider_tree [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.795482] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1072.795482] env[61545]: value = "task-4256439" [ 1072.795482] env[61545]: _type = "Task" [ 1072.795482] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.807469] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.873413] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256436, 'name': Rename_Task, 'duration_secs': 0.182848} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.873783] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.874412] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69915264-a1bc-468a-a5fa-f454f418a865 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.879600] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256437, 'name': PowerOffVM_Task, 'duration_secs': 0.22935} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.880296] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.880474] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.880740] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6efefad4-573e-4cf7-86c7-45689d594976 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.885827] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1072.885827] env[61545]: value = "task-4256440" [ 1072.885827] env[61545]: _type = "Task" [ 1072.885827] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.895550] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256440, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.959752] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.960036] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.960277] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Deleting the datastore file [datastore2] 7301c541-664f-43ec-8a34-86f38cac22ab {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.960610] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-787f3160-5db9-4540-aaf6-4b17fd13f8cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.972016] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for the task: (returnval){ [ 1072.972016] env[61545]: value = "task-4256442" [ 1072.972016] env[61545]: _type = "Task" [ 1072.972016] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.982845] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256442, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.228814] env[61545]: DEBUG nova.compute.manager [req-80b23daa-d371-4e2a-b6ad-ea86e2281dde req-c55ef9b5-a0b1-467f-9386-d00ff5a33794 service nova] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Received event network-vif-deleted-a2f62a8a-fffb-45a0-9cc8-4b9bb01c8fd6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1073.229066] env[61545]: DEBUG nova.compute.manager [req-80b23daa-d371-4e2a-b6ad-ea86e2281dde req-c55ef9b5-a0b1-467f-9386-d00ff5a33794 service nova] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Received event network-vif-deleted-5fdf9f41-2fb1-4e55-9ee1-90954aee9f9f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1073.291723] env[61545]: DEBUG nova.scheduler.client.report [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.309807] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.313307] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.313307] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.345991] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.346646] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afac8e21-e423-4188-ab6e-9e53521b5bb5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.358224] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1073.358224] env[61545]: value = "task-4256443" [ 1073.358224] env[61545]: _type = "Task" [ 1073.358224] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.372899] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1073.373244] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.374625] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f414e9e-0f14-4739-b0a4-13823d934e12 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.384147] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.384940] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f33808de-1a2e-4635-952c-478940e6e1c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.401397] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256440, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.483639] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256442, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.785160] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.785160] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.785160] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.785160] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90b42975-2e2e-422e-bcc9-18c438f7df3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.793148] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1073.793148] env[61545]: value = "task-4256445" [ 1073.793148] env[61545]: _type = "Task" [ 1073.793148] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.800945] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.808979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.059s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.809283] env[61545]: DEBUG nova.objects.instance [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'resources' on Instance uuid 8b9c45a7-3574-47c9-b46b-34eed554fdc8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.820128] env[61545]: INFO nova.compute.manager [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Detaching volume d618a534-2f80-4250-a05c-4b8bb6a7e98a [ 1073.822856] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.827478] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.837658] env[61545]: INFO nova.scheduler.client.report [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted allocations for instance 8d838d3b-32ad-4bb2-839e-6bd81c363447 [ 1073.874071] env[61545]: INFO nova.virt.block_device [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Attempting to driver detach volume d618a534-2f80-4250-a05c-4b8bb6a7e98a from mountpoint /dev/sdb [ 1073.874071] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1073.874071] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838743', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'name': 'volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b1277c3b-cd7b-43be-9eff-640145dde5e5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'serial': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1073.874802] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d548a4-f8ae-422a-b233-801b441d16e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.903839] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15ec710-5fb9-4c73-91bf-3aac86615cfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.913516] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256440, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.918330] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47db2bcc-2ac6-40cb-ba28-824cfdc17f72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.953290] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4925c446-7e56-4b78-942e-36b4f414fbd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.978115] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] The volume has not been displaced from its original location: [datastore1] volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a/volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1073.986673] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfiguring VM instance instance-0000003d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1073.990588] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cf003b8-8f99-41a1-87ca-8baae6adbe0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.011404] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.013216] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1074.013216] env[61545]: value = "task-4256446" [ 1074.013216] env[61545]: _type = "Task" [ 1074.013216] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.024100] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.307780] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.316119] env[61545]: DEBUG nova.objects.instance [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'numa_topology' on Instance uuid 8b9c45a7-3574-47c9-b46b-34eed554fdc8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.316972] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.351318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e591780f-f19d-42b1-9646-2a2e04c335ea tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "8d838d3b-32ad-4bb2-839e-6bd81c363447" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.077s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.409922] env[61545]: DEBUG oslo_vmware.api [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256440, 'name': PowerOnVM_Task, 'duration_secs': 1.171406} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.410320] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1074.410535] env[61545]: INFO nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Took 14.59 seconds to spawn the instance on the hypervisor. [ 1074.410718] env[61545]: DEBUG nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.411563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b47e438-e896-4e12-b4ab-5f944f28d500 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.492677] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.526023] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256446, 'name': ReconfigVM_Task, 'duration_secs': 0.497154} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.526023] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Reconfigured VM instance instance-0000003d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1074.530326] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e2bfdc9-49c7-4867-a484-788c57aa768c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.550758] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1074.550758] env[61545]: value = "task-4256447" [ 1074.550758] env[61545]: _type = "Task" [ 1074.550758] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.562091] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.809277] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.815365] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.818027] env[61545]: DEBUG nova.objects.base [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Object Instance<8b9c45a7-3574-47c9-b46b-34eed554fdc8> lazy-loaded attributes: resources,numa_topology {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1074.939795] env[61545]: INFO nova.compute.manager [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Took 34.82 seconds to build instance. 
[ 1074.995025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.995025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.000876] env[61545]: DEBUG oslo_vmware.api [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Task: {'id': task-4256442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.960947} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.004944] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.005178] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.005392] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.005609] env[61545]: INFO nova.compute.manager [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 2.67 seconds to destroy the instance on the hypervisor. [ 1075.005868] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.008177] env[61545]: DEBUG nova.compute.manager [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.008177] env[61545]: DEBUG nova.network.neutron [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.047968] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.048385] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.049047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.049047] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.049284] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.052016] env[61545]: INFO nova.compute.manager [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Terminating instance [ 1075.067692] env[61545]: DEBUG oslo_vmware.api [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256447, 'name': ReconfigVM_Task, 'duration_secs': 0.238031} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.070959] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838743', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'name': 'volume-d618a534-2f80-4250-a05c-4b8bb6a7e98a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b1277c3b-cd7b-43be-9eff-640145dde5e5', 'attached_at': '', 'detached_at': '', 'volume_id': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a', 'serial': 'd618a534-2f80-4250-a05c-4b8bb6a7e98a'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1075.155676] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eca4969-fce4-4b5f-84b1-7a0c287c2176 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.165630] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbaa227-5abb-431e-b146-f549afe45889 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.209858] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060630e0-bf4a-4b52-8ca9-5fb7327d8ba5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.221020] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc35b9f-8a58-4e60-9175-b03adb6f6353 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.236326] env[61545]: DEBUG nova.compute.provider_tree [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.311825] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.315586] env[61545]: DEBUG nova.compute.manager [req-1fbd2048-102b-415a-b542-3ffa6774fa31 req-5aae077d-953b-4257-bc96-2a8f5840295f service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Received event network-vif-deleted-c9965f3c-2499-49d5-ba57-a093571cfc9e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1075.315842] env[61545]: INFO nova.compute.manager [req-1fbd2048-102b-415a-b542-3ffa6774fa31 req-5aae077d-953b-4257-bc96-2a8f5840295f service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Neutron deleted interface c9965f3c-2499-49d5-ba57-a093571cfc9e; detaching it from the instance and deleting it from the info cache [ 1075.316221] env[61545]: DEBUG nova.network.neutron [req-1fbd2048-102b-415a-b542-3ffa6774fa31 req-5aae077d-953b-4257-bc96-2a8f5840295f service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.322755] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.443990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e68c523f-c363-4a21-86d0-41d49cfbcbc6 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.360s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.505789] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1075.561422] env[61545]: DEBUG nova.compute.manager [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1075.561802] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.562881] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6d59c2-6e24-4918-8a4c-b68d2185df68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.571174] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.571453] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-228a668d-1c5b-4df6-8bf6-dfb6c8badd04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.578269] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1075.578269] env[61545]: value = "task-4256448" [ 1075.578269] env[61545]: _type = "Task" [ 1075.578269] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.597570] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256448, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.637163] env[61545]: DEBUG nova.objects.instance [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid b1277c3b-cd7b-43be-9eff-640145dde5e5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.740387] env[61545]: DEBUG nova.scheduler.client.report [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.784808] env[61545]: DEBUG nova.network.neutron [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.809622] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.718423} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.809958] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.810127] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.810351] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.819827] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256439, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.756152} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.820109] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d/OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d.vmdk to [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk. [ 1075.820729] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Cleaning up location [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1075.820729] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_dfe55541-fbd4-4cb2-8169-866c850b0b0d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.820729] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a06fe25a-8ff9-46a7-8146-ecaaeb6d953e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.823575] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64cda7ba-2693-4323-af9c-81ca506b7857 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.829275] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1075.829275] env[61545]: value = "task-4256449" [ 1075.829275] env[61545]: _type = "Task" [ 1075.829275] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.835713] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78315883-b00d-4190-bf02-4035d608c745 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.854134] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.870101] env[61545]: DEBUG nova.compute.manager [req-1fbd2048-102b-415a-b542-3ffa6774fa31 req-5aae077d-953b-4257-bc96-2a8f5840295f service nova] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Detach interface failed, port_id=c9965f3c-2499-49d5-ba57-a093571cfc9e, reason: Instance 7301c541-664f-43ec-8a34-86f38cac22ab could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1076.038692] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.089842] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256448, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.247043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.437s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.248481] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.115s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.248631] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.250768] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.895s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.250984] env[61545]: DEBUG nova.objects.instance [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'pci_requests' on Instance uuid f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.273912] env[61545]: INFO nova.scheduler.client.report [None req-8e80716a-e392-4eac-b697-1762c6ff119a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted allocations for instance 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67 [ 1076.288131] env[61545]: INFO nova.compute.manager [-] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Took 1.28 seconds to deallocate network for instance. 
[ 1076.342530] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112821} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.342814] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.342990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.346227] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk to [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1076.346227] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc43e71a-2e1a-4748-b6f2-3473c2b90a85 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.353114] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1076.353114] env[61545]: value = "task-4256450" [ 1076.353114] env[61545]: _type = "Task" [ 1076.353114] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.370153] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.592492] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256448, 'name': PowerOffVM_Task, 'duration_secs': 0.513381} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.592967] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.593252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.593614] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89bac9f3-ea05-497f-ac85-04ccb1dcdbb5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.644551] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6dd47a34-5931-4418-b485-d89dab8de4ae tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.331s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.693076] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.693222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.693464] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleting the datastore file [datastore2] 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.694204] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcf21abf-5ade-4624-88c1-1171610e36b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.702868] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for the task: (returnval){ [ 1076.702868] env[61545]: value = "task-4256452" [ 1076.702868] env[61545]: _type = "Task" [ 1076.702868] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.715670] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.756461] env[61545]: DEBUG nova.objects.instance [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'numa_topology' on Instance uuid f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.763235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dcf3fce9-24c1-423e-bfe8-29338d69e275 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.391s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.763235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.620s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.763235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.763235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.763235] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.766964] env[61545]: INFO nova.compute.manager [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Terminating instance [ 1076.784588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8e80716a-e392-4eac-b697-1762c6ff119a 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "1be4da80-c9ee-424e-b4e3-bdd22eb0cd67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.162s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.796957] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.863202] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1076.863596] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.863791] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1076.864086] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.864279] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1076.864494] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1076.864827] env[61545]: DEBUG nova.virt.hardware [None 
req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1076.865048] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1076.865297] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1076.865537] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1076.865789] env[61545]: DEBUG nova.virt.hardware [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1076.867119] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f7190f-3383-4a31-8332-b5daa8a11b3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.882052] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.882397] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9dfd65-4f6e-4c99-b3ab-0ddde6f55dfb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.899361] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:f2:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a70c23b-fb28-4302-add1-42b019c5c4f7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.908324] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.908706] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1076.908972] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-823f11b9-58ff-4d3f-90d6-8841004f1327 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.931017] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.931017] env[61545]: value = "task-4256453" [ 1076.931017] env[61545]: _type = "Task" [ 1076.931017] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.947500] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.968366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.968670] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.968940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.969161] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.969336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.971636] env[61545]: INFO nova.compute.manager [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Terminating instance [ 1077.035676] env[61545]: DEBUG nova.compute.manager [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1077.036654] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dc19ed-4da6-4af5-98ba-7823f18fd8ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.213923] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256452, 'name': DeleteDatastoreFile_Task} 
progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.260361] env[61545]: INFO nova.compute.claims [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.275284] env[61545]: DEBUG nova.compute.manager [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.275725] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.276379] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66274d80-a922-496a-9f03-5f394c0abc9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.288510] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4711e240-edef-4c78-ad3a-abf7f6f9d795 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.326202] env[61545]: WARNING nova.virt.vmwareapi.vmops [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b9c45a7-3574-47c9-b46b-34eed554fdc8 could not be found. [ 1077.326558] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.326809] env[61545]: INFO nova.compute.manager [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1077.327121] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.327468] env[61545]: DEBUG nova.compute.manager [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1077.327609] env[61545]: DEBUG nova.network.neutron [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.365111] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.442774] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.475896] env[61545]: DEBUG nova.compute.manager [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.476352] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.477457] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1a4a5b-bbda-45c1-9377-81a6c053c975 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.486937] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.487216] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-377ffbf5-de65-491c-bf25-32eab4ba2bd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.495738] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1077.495738] env[61545]: value = "task-4256454" [ 1077.495738] env[61545]: _type = "Task" [ 1077.495738] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.508318] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256454, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.548996] env[61545]: INFO nova.compute.manager [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] instance snapshotting [ 1077.552117] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5efb66-fad8-4c82-8dc5-a0a87c484003 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.577304] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372a1401-5682-4096-ac7d-fdf80acb9fda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.722582] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.865124] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.945051] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.005980] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256454, 'name': PowerOffVM_Task, 'duration_secs': 0.507878} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.007174] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1078.007174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1078.007174] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0f2c2dd-2378-4df9-a7ce-a895eab64d9b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.091140] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1078.091556] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fb93138c-86fe-493e-9a48-12caf862af1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.101214] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1078.101214] env[61545]: value = "task-4256456" [ 1078.101214] env[61545]: _type = "Task" [ 1078.101214] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.113409] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.151688] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1078.151891] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1078.152144] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleting the datastore file [datastore2] b1277c3b-cd7b-43be-9eff-640145dde5e5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.152483] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cc68789-a467-4053-a560-a860992e19ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.160350] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1078.160350] env[61545]: value = "task-4256457" [ 1078.160350] env[61545]: _type = "Task" [ 1078.160350] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.171546] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.215143] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.336099] env[61545]: DEBUG nova.network.neutron [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.370819] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.447191] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.563655] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e78d5e-1732-49c3-b512-3cda61306b66 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.573535] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dd5d3d-8885-4508-b057-567fb5f8affe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.614360] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9763968-e226-4a5e-8920-fc5953da652b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.626741] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.628374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3199be0e-49c9-4c21-84b3-94617ca30286 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.645551] env[61545]: DEBUG nova.compute.provider_tree [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.672347] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256457, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.687089] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.687349] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.716676] env[61545]: DEBUG oslo_vmware.api [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Task: {'id': task-4256452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.823725} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.717071] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.717326] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1078.717744] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1078.717999] env[61545]: INFO nova.compute.manager [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Took 3.16 seconds to destroy the instance on the hypervisor. [ 1078.718320] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.719031] env[61545]: DEBUG nova.compute.manager [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1078.719247] env[61545]: DEBUG nova.network.neutron [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1078.839516] env[61545]: INFO nova.compute.manager [-] [instance: 8b9c45a7-3574-47c9-b46b-34eed554fdc8] Took 1.51 seconds to deallocate network for instance. [ 1078.869052] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.948137] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.120884] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.149245] env[61545]: DEBUG nova.scheduler.client.report [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.172800] env[61545]: DEBUG oslo_vmware.api [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.980868} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.173066] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.173257] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.173433] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.173611] env[61545]: INFO nova.compute.manager [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1079.173922] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1079.174050] env[61545]: DEBUG nova.compute.manager [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.174171] env[61545]: DEBUG nova.network.neutron [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1079.190032] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1079.367505] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256450, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.721916} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.374021] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5bc7010a-ee45-48d1-87c9-e47216be1200/5bc7010a-ee45-48d1-87c9-e47216be1200.vmdk to [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.374021] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cd98ec-efb1-42a2-ac6c-8a75515d1de7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.396182] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.396503] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56f6924d-264a-4109-81ba-fa2613721aa1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.417945] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1079.417945] env[61545]: value = "task-4256458" [ 1079.417945] env[61545]: _type = "Task" [ 1079.417945] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.432035] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.448181] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256453, 'name': CreateVM_Task, 'duration_secs': 2.13058} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.451499] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1079.451499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.451499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.451499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1079.451499] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dbf385a-2651-4705-bdbf-20e2d4e83d24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.455501] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1079.455501] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524eaaee-df99-9eb8-c7fb-ee8de3e6c940" [ 1079.455501] env[61545]: _type = "Task" [ 1079.455501] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.464217] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524eaaee-df99-9eb8-c7fb-ee8de3e6c940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.513682] env[61545]: DEBUG nova.network.neutron [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.622080] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.659034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.406s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.659788] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.439s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.660272] env[61545]: DEBUG nova.objects.instance [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lazy-loading 'resources' on Instance uuid b33e29cc-fe26-429a-8799-8d790667cc1d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.714283] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.748477] env[61545]: INFO nova.network.neutron [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating port 2147b830-281d-4a24-90d1-22eccefc4c5c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1079.874437] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e1ccb8ac-005a-45d0-be85-e083ce773c37 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "8b9c45a7-3574-47c9-b46b-34eed554fdc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.112s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.930752] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.967533] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524eaaee-df99-9eb8-c7fb-ee8de3e6c940, 'name': SearchDatastore_Task, 'duration_secs': 0.011981} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.967779] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.968052] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.968300] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.968506] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.968768] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.969393] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-939eba4f-03b4-4e90-ac67-df12ff442e67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.979843] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.979971] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.980833] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-775b142c-8c63-4e39-94b0-f33e0aa912e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.987062] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1079.987062] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f064e0-d8dd-f584-3135-dbcc23672654" [ 1079.987062] env[61545]: _type = "Task" [ 1079.987062] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.996226] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f064e0-d8dd-f584-3135-dbcc23672654, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.015939] env[61545]: INFO nova.compute.manager [-] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Took 1.30 seconds to deallocate network for instance. [ 1080.123178] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.334488] env[61545]: DEBUG nova.compute.manager [req-20273446-47c5-4733-94bf-53f38b9202e3 req-8f115437-304c-4a32-a907-6f114879f46f service nova] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Received event network-vif-deleted-d289bc06-c77a-460a-b15d-e94dcfb3ff53 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1080.334553] env[61545]: DEBUG nova.compute.manager [req-20273446-47c5-4733-94bf-53f38b9202e3 req-8f115437-304c-4a32-a907-6f114879f46f service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Received event network-vif-deleted-bf240544-0938-4b0c-9469-4d66bd37ee14 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1080.335090] env[61545]: INFO nova.compute.manager [req-20273446-47c5-4733-94bf-53f38b9202e3 req-8f115437-304c-4a32-a907-6f114879f46f service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Neutron deleted interface bf240544-0938-4b0c-9469-4d66bd37ee14; detaching it from the instance and deleting it from the info cache [ 1080.335933] env[61545]: DEBUG nova.network.neutron [req-20273446-47c5-4733-94bf-53f38b9202e3 req-8f115437-304c-4a32-a907-6f114879f46f service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.433587] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256458, 'name': ReconfigVM_Task, 'duration_secs': 0.917439} completed 
successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.433954] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42/4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.435474] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'boot_index': 0, 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'guest_format': None, 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'image_id': 'bf68eb43-6d66-4532-9eb1-af7d78faa698'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'attachment_id': '80cbdd79-308c-4e76-ad16-ca2f9ac0f4c8', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838777', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'name': 'volume-9009d07b-81b0-4ba0-ae46-44590740ed11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42', 'attached_at': '', 'detached_at': '', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'serial': '9009d07b-81b0-4ba0-ae46-44590740ed11'}, 'guest_format': None, 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=61545) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1080.436800] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1080.437047] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838777', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'name': 'volume-9009d07b-81b0-4ba0-ae46-44590740ed11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42', 'attached_at': '', 'detached_at': '', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'serial': '9009d07b-81b0-4ba0-ae46-44590740ed11'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1080.437991] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80085e6-c27d-49e6-9268-cb82a709a310 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.460728] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74972788-7182-4d3a-9a67-0cb8bc971df9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.488586] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-9009d07b-81b0-4ba0-ae46-44590740ed11/volume-9009d07b-81b0-4ba0-ae46-44590740ed11.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.492512] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08debd50-cf78-46ce-855e-3ad5cf256c3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.521598] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f064e0-d8dd-f584-3135-dbcc23672654, 'name': SearchDatastore_Task, 'duration_secs': 0.0101} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.524064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.524597] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1080.524597] env[61545]: value = "task-4256459" [ 1080.524597] env[61545]: _type = "Task" [ 1080.524597] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.524834] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c55d93d4-57b8-46b0-9c13-f27d491f07a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.534968] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1080.534968] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524fda15-cfa6-2a29-738f-051fc466521b" [ 1080.534968] env[61545]: _type = "Task" [ 1080.534968] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.538391] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256459, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.543502] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04f1a48-35a2-48d9-94ae-3bc4b89fd050 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.546615] env[61545]: DEBUG nova.network.neutron [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.556925] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524fda15-cfa6-2a29-738f-051fc466521b, 'name': SearchDatastore_Task, 'duration_secs': 0.011459} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.559194] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.559485] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.559912] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a71c48f3-b789-4e23-a4e3-a85e73dc99c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.562677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f48e8e-c13b-4350-9d9d-6de824ab8225 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.596904] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1080.596904] env[61545]: value = "task-4256460" [ 1080.596904] env[61545]: _type = "Task" [ 1080.596904] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.597647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ff46d5-3e90-47aa-8a5d-ee4c2a2f3f55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.611719] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eede4ff-fe7f-4578-a150-1ebde823619c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.616101] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.631331] env[61545]: DEBUG nova.compute.provider_tree [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.636122] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256456, 'name': CreateSnapshot_Task, 'duration_secs': 2.077044} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.636400] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1080.637422] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662d7cf1-6092-4593-b42f-1ae66281f04e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.825760] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "79762f13-2f93-43ba-883b-9437c7732c04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.825980] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.839670] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47180de3-23b9-46ea-915e-faddfac8f88e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.851273] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0565b6-1cf5-4a41-8378-fd3922c36329 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.885356] env[61545]: DEBUG nova.compute.manager [req-20273446-47c5-4733-94bf-53f38b9202e3 req-8f115437-304c-4a32-a907-6f114879f46f service nova] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Detach interface failed, port_id=bf240544-0938-4b0c-9469-4d66bd37ee14, reason: Instance b1277c3b-cd7b-43be-9eff-640145dde5e5 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1081.040482] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256459, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.058557] env[61545]: INFO nova.compute.manager [-] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Took 1.88 seconds to deallocate network for instance. [ 1081.110891] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.111220] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1081.111441] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1081.111700] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-390ca316-2409-4a5a-8527-43b987147135 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.119122] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1081.119122] env[61545]: value = "task-4256461" [ 1081.119122] env[61545]: _type = "Task" [ 1081.119122] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.128119] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256461, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.138016] env[61545]: DEBUG nova.scheduler.client.report [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.161007] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1081.161675] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f65b30bd-f0f3-4237-9879-55abd2dc20de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.171152] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1081.171152] env[61545]: value = "task-4256462" [ 1081.171152] env[61545]: _type = "Task" [ 1081.171152] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.179966] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256462, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.328415] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1081.540640] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256459, 'name': ReconfigVM_Task, 'duration_secs': 0.551429} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.541065] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-9009d07b-81b0-4ba0-ae46-44590740ed11/volume-9009d07b-81b0-4ba0-ae46-44590740ed11.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.546789] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c524567d-6021-4b05-bb69-8228509ea2a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.563870] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1081.563870] env[61545]: value = "task-4256463" [ 1081.563870] env[61545]: _type = "Task" [ 1081.563870] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.569218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.569599] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.569710] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.570084] env[61545]: DEBUG nova.objects.instance [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid 97b72809-2a1e-4eda-af82-71cac2d79a64 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.581582] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.629419] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06471} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.629709] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.630563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca05f14-7f51-4055-a64a-edb179247a65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.649237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.990s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.660109] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.660816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.216s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.661029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.663275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.178s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.663509] env[61545]: DEBUG nova.objects.instance [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed 
tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'resources' on Instance uuid 1e5be92c-d727-4515-9e16-85ade2719455 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.665201] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62248d4d-1d5c-4afc-bad5-5416028af6fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.688683] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.688944] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.689083] env[61545]: DEBUG nova.network.neutron [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.691994] env[61545]: INFO nova.scheduler.client.report [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted allocations for instance b33e29cc-fe26-429a-8799-8d790667cc1d [ 1081.703822] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1081.703822] env[61545]: value = "task-4256464" [ 1081.703822] env[61545]: _type = "Task" [ 1081.703822] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.708993] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256462, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.710449] env[61545]: INFO nova.scheduler.client.report [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted allocations for instance 844f01ed-4dae-4e13-9d1c-09a73f413201 [ 1081.728701] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256464, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.847655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.977421] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4576a66a-a8e7-4480-87c7-0bc4565dc961 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.986230] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5711da-ba56-45f1-bf42-ace8c567c195 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.021148] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a22715-b6c8-4ed5-aec3-b9f59bd1939c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.029088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e17dcab-8d9a-4b23-b0a7-9b4ded4a31c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.043398] env[61545]: DEBUG nova.compute.provider_tree [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.073996] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256463, 'name': ReconfigVM_Task, 'duration_secs': 0.422462} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.074346] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838777', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'name': 'volume-9009d07b-81b0-4ba0-ae46-44590740ed11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42', 'attached_at': '', 'detached_at': '', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'serial': '9009d07b-81b0-4ba0-ae46-44590740ed11'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1082.075013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6e83b63-4e46-4cc6-9914-8d0c31c32df1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.081966] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1082.081966] env[61545]: value = "task-4256465" [ 1082.081966] env[61545]: _type = "Task" [ 1082.081966] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.094544] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256465, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.182610] env[61545]: DEBUG nova.objects.instance [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid 97b72809-2a1e-4eda-af82-71cac2d79a64 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.206636] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256462, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.210690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1006fa68-a3a6-477f-8610-d9c5684c8efa tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "b33e29cc-fe26-429a-8799-8d790667cc1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.201s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.221211] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256464, 'name': ReconfigVM_Task, 'duration_secs': 0.315839} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.221526] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e/2f8567b1-7291-4705-8ef3-23547eb4860e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.225140] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d20fc75-37bf-453a-8610-0925d87463ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.226394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1628239e-7f98-43fd-8b98-24247623e207 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "844f01ed-4dae-4e13-9d1c-09a73f413201" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.134s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.233314] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1082.233314] env[61545]: value = "task-4256466" [ 1082.233314] env[61545]: _type = "Task" [ 1082.233314] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.246627] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256466, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.435484] env[61545]: DEBUG nova.compute.manager [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1082.435704] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.435915] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.436091] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.436261] env[61545]: DEBUG nova.compute.manager [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] No waiting events found dispatching network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.436438] env[61545]: WARNING nova.compute.manager [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received unexpected event network-vif-plugged-2147b830-281d-4a24-90d1-22eccefc4c5c for instance with vm_state shelved_offloaded and task_state spawning. [ 1082.436625] env[61545]: DEBUG nova.compute.manager [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1082.436833] env[61545]: DEBUG nova.compute.manager [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing instance network info cache due to event network-changed-2147b830-281d-4a24-90d1-22eccefc4c5c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1082.437018] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.469486] env[61545]: DEBUG nova.network.neutron [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.546738] env[61545]: DEBUG nova.scheduler.client.report [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.593842] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256465, 'name': Rename_Task, 'duration_secs': 0.356779} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.594156] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.594451] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-939bf5b0-d352-418e-9945-176783397768 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.603248] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1082.603248] env[61545]: value = "task-4256467" [ 1082.603248] env[61545]: _type = "Task" [ 1082.603248] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.612523] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.685120] env[61545]: DEBUG nova.objects.base [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance<97b72809-2a1e-4eda-af82-71cac2d79a64> lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1082.685334] env[61545]: DEBUG nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.702426] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256462, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.725328] env[61545]: DEBUG nova.policy [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.743675] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256466, 'name': Rename_Task, 'duration_secs': 0.162405} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.744058] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.744351] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23ac0851-374c-4548-b0e8-00f5c6d0cec6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.752622] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1082.752622] env[61545]: value = "task-4256468" [ 1082.752622] env[61545]: _type = "Task" [ 1082.752622] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.769450] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256468, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.974575] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.979093] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.980367] env[61545]: DEBUG nova.network.neutron [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Refreshing network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.012255] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f5b338cdc4d1d1af0d87c17e2d7de4ef',container_format='bare',created_at=2025-06-03T12:53:13Z,direct_url=,disk_format='vmdk',id=623bd67d-a8b3-4e1b-9169-f7cd1bc393b7,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1942833574-shelved',owner='a0f97aab169448c5a0d956b1b33e1ac2',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2025-06-03T12:53:29Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.012665] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.012902] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.013175] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.013378] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.013575] env[61545]: DEBUG nova.virt.hardware 
[None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.013849] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.015327] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.015636] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.015885] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.016146] env[61545]: DEBUG nova.virt.hardware [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.017937] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fc073e-7dc5-4d22-9832-7034a6b6b079 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.032480] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c553abf-43d6-40b8-b68e-d69e35437232 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.055121] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.392s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.058905] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:07:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '557aba95-8968-407a-bac2-2fae66f7c8e5', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2147b830-281d-4a24-90d1-22eccefc4c5c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.068249] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.069185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.031s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.071446] env[61545]: INFO nova.compute.claims [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.074325] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.075222] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e3f7999-98de-45a3-b2ee-ab276fc23f0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.096986] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.096986] env[61545]: value = "task-4256469" [ 1083.096986] env[61545]: _type = "Task" [ 1083.096986] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.098164] env[61545]: INFO nova.scheduler.client.report [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted allocations for instance 1e5be92c-d727-4515-9e16-85ade2719455 [ 1083.115246] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256469, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.122013] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256467, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.180140] env[61545]: DEBUG nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Successfully created port: 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1083.203251] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256462, 'name': CloneVM_Task, 'duration_secs': 1.934027} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.203394] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Created linked-clone VM from snapshot [ 1083.204249] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c80a92c-75bd-48e2-91fc-92d07d3416b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.213731] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Uploading image c9e2dc03-db2f-475a-873a-e576bc26dc1f {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1083.227532] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1083.227989] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5e6dc564-1f61-4977-9889-7683ad1f58b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.237370] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1083.237370] env[61545]: value = "task-4256470" [ 1083.237370] env[61545]: _type = "Task" [ 1083.237370] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.250263] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256470, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.267956] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256468, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.615867] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256469, 'name': CreateVM_Task, 'duration_secs': 0.396135} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.616426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38aa929c-b978-4ad4-8c55-0ed0ec1c56ed tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "1e5be92c-d727-4515-9e16-85ade2719455" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.432s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.617413] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.618492] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.618681] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.619076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.619566] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2460a905-93bb-45fc-a712-a6054bc1b668 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.625330] env[61545]: DEBUG oslo_vmware.api [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256467, 'name': PowerOnVM_Task, 'duration_secs': 0.687754} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.625981] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.629757] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1083.629757] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f17f0c-8df0-2f80-2b29-455a9e5f114e" [ 1083.629757] env[61545]: _type = "Task" [ 1083.629757] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.647072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.647371] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Processing image 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.648761] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.648761] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.648761] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.648761] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc53d4e0-2d76-4392-9a09-7cdeac419784 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.671819] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.672101] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.672980] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3834dd37-4d11-4098-a23a-5c5f91209e34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.680325] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1083.680325] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5202208a-99af-a491-da1b-9e0eb4ade6aa" [ 1083.680325] env[61545]: _type = "Task" [ 1083.680325] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.696374] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1083.696710] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Fetch image to [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024/OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1083.696912] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Downloading stream optimized image 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 to [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024/OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024.vmdk on the data store datastore2 as vApp {{(pid=61545) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1083.697104] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Downloading image file data 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 to the ESX as VM named 'OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024' {{(pid=61545) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1083.754279] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256470, 'name': Destroy_Task, 'duration_secs': 0.514665} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.755448] env[61545]: DEBUG nova.compute.manager [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.759283] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Destroyed the VM [ 1083.759567] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1083.760432] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2af415-21dd-429a-8214-867e95812baf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.765183] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f5bb0b0b-1155-4cdc-b505-eabdb4337b45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.773692] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256468, 'name': PowerOnVM_Task, 'duration_secs': 0.542029} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.776949] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.777246] env[61545]: DEBUG nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.781486] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1083.781486] env[61545]: value = "task-4256471" [ 1083.781486] env[61545]: _type = "Task" [ 1083.781486] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.785026] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781e362a-c3ed-4bd2-9b4a-ef9dccc6ad6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.795120] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256471, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.801400] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1083.801400] env[61545]: value = "resgroup-9" [ 1083.801400] env[61545]: _type = "ResourcePool" [ 1083.801400] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1083.801602] env[61545]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9690512b-ef15-44a4-a029-2649489aa6f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.825468] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lease: (returnval){ [ 1083.825468] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1083.825468] env[61545]: _type = "HttpNfcLease" [ 1083.825468] env[61545]: } obtained for vApp import into resource pool (val){ [ 1083.825468] env[61545]: value = "resgroup-9" [ 1083.825468] env[61545]: _type = "ResourcePool" [ 1083.825468] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1083.829061] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the lease: (returnval){ [ 1083.829061] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1083.829061] env[61545]: _type = "HttpNfcLease" [ 1083.829061] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1083.833204] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1083.833204] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1083.833204] env[61545]: _type = "HttpNfcLease" [ 1083.833204] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1083.927288] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "a7967300-6760-4310-bf48-00ddcaac3ee8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.927608] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.137088] env[61545]: DEBUG nova.network.neutron [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updated VIF entry in instance network info cache for port 2147b830-281d-4a24-90d1-22eccefc4c5c. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.137532] env[61545]: DEBUG nova.network.neutron [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": {"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.294723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b971124d-c8e3-4751-acc4-816f3cf22ae5 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 41.304s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.301602] env[61545]: DEBUG oslo_vmware.api [None 
req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256471, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.320241] env[61545]: INFO nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] bringing vm to original state: 'stopped' [ 1084.342039] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1084.342039] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1084.342039] env[61545]: _type = "HttpNfcLease" [ 1084.342039] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1084.347934] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e77c62-565a-4bca-accc-352e2638aa61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.356659] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7012b9-dbd6-40bb-ac72-ce33d8650552 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.393430] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370ea4a4-0cb6-46d0-9d67-96d4ea8721ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.403042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61a381-7f3f-48e9-9d61-ba6d6532d52e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.418872] env[61545]: DEBUG nova.compute.provider_tree [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.429775] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1084.640603] env[61545]: DEBUG oslo_concurrency.lockutils [req-c0242452-f2c1-4e6c-8a8c-4f0ec0740dc9 req-7110d16d-81ba-40fe-929f-f59a56979da7 service nova] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.801480] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256471, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.834940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.835068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.840378] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1084.840378] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1084.840378] env[61545]: _type = "HttpNfcLease" [ 1084.840378] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1084.841250] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1084.841250] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523419f5-4d72-b433-a52e-e11d6576012a" [ 1084.841250] env[61545]: _type = "HttpNfcLease" [ 1084.841250] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1084.842309] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df2af29-643f-481f-b6b9-f0eab9395568 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.851643] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1084.851967] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk. 
{{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1084.918925] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d12853c6-aed8-48f9-a8ca-be9aa47d85b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.923775] env[61545]: DEBUG nova.scheduler.client.report [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.953318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.172458] env[61545]: DEBUG nova.compute.manager [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-vif-plugged-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1085.172721] env[61545]: DEBUG oslo_concurrency.lockutils [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.172944] env[61545]: DEBUG oslo_concurrency.lockutils [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.173172] env[61545]: DEBUG oslo_concurrency.lockutils [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.173351] env[61545]: DEBUG nova.compute.manager [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] No waiting events found dispatching network-vif-plugged-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1085.173568] env[61545]: WARNING nova.compute.manager [req-47c5d2af-2a77-4905-8aff-dd6ec6f79490 req-de8d7d62-596e-4dc0-9506-8a6077a59154 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received unexpected event network-vif-plugged-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f for instance with vm_state active and task_state None. [ 1085.299335] env[61545]: DEBUG oslo_vmware.api [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256471, 'name': RemoveSnapshot_Task, 'duration_secs': 1.108226} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.299920] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1085.303987] env[61545]: DEBUG nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Successfully updated port: 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.327734] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.328016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.328213] env[61545]: DEBUG nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.329711] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf1eaee-9ef3-4848-8242-0970cdccdfda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.337670] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1085.343988] env[61545]: DEBUG nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1085.429439] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.430192] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.433581] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.637s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.433834] env[61545]: DEBUG nova.objects.instance [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lazy-loading 'resources' on Instance uuid 7301c541-664f-43ec-8a34-86f38cac22ab {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.806478] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.806856] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.807143] env[61545]: DEBUG nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.809972] env[61545]: WARNING nova.compute.manager [None req-5f12bd50-cbc3-497f-86b8-979e0473e669 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: 
db2d0e21-f6bb-4f61-8d54-e9191de13a59] Image not found during snapshot: nova.exception.ImageNotFound: Image c9e2dc03-db2f-475a-873a-e576bc26dc1f could not be found. [ 1085.853997] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1085.857109] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d76ad1c-348b-42fe-9def-ba9c6ae06ea8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.867163] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1085.867163] env[61545]: value = "task-4256474" [ 1085.867163] env[61545]: _type = "Task" [ 1085.867163] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.881394] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.885110] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.942692] env[61545]: DEBUG nova.compute.utils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1085.946857] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1085.947106] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.005212] env[61545]: DEBUG nova.policy [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1faebc6afac6456f870ed4ef8a0998f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7140d79e25954245bd4224e7fd4fa791', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1086.198158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.198158] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.198381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.198639] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.198961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.204588] env[61545]: INFO nova.compute.manager [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 
tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Terminating instance [ 1086.261913] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398fcd53-0ede-4444-9adb-9bb99be9eae8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.271785] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c98cb8-f8ac-4046-9bac-2381162ae89f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.314197] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda31838-7d33-4f9e-b5bc-d1ed349478e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.320277] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Completed reading data from the image iterator. {{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1086.320404] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1086.321668] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22601d0-2a78-4611-8556-2ba26cd1d04d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.334442] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f6610e-e428-4cc7-87c6-b975a872bc94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.338359] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1086.338625] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1086.338833] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-22131b08-fe15-47c6-8323-3ce4e6b49697 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.354672] env[61545]: DEBUG nova.compute.provider_tree [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.380196] env[61545]: DEBUG oslo_vmware.api [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256474, 'name': PowerOffVM_Task, 'duration_secs': 0.349142} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.380495] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.380787] env[61545]: DEBUG nova.compute.manager [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.381551] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92fc918-e799-439e-8893-3a238778368c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.393214] env[61545]: WARNING nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. ignoring it [ 1086.450819] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.606188] env[61545]: DEBUG oslo_vmware.rw_handles [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f878a-1151-826e-0ea5-fa47eca1b20b/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1086.606402] env[61545]: INFO nova.virt.vmwareapi.images [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Downloaded image file data 623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 [ 1086.607399] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da342fdc-1431-48c0-803f-80823b60f9fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.626648] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bda716a5-7b77-4d6b-a06c-f9277b6035c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.647134] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Successfully created port: 208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.655883] env[61545]: INFO nova.virt.vmwareapi.images [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] The imported VM was unregistered [ 1086.658974] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Caching image {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1086.659293] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Creating directory with path [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.659704] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e641f787-8e4d-46ef-9827-2f0aba871b31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.673817] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Created directory with path [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.674106] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024/OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024.vmdk to [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk. 
{{(pid=61545) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1086.674361] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-03d187b9-cc8c-40d8-9278-9bf3d0f2ae40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.682982] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1086.682982] env[61545]: value = "task-4256476" [ 1086.682982] env[61545]: _type = "Task" [ 1086.682982] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.695038] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.716718] env[61545]: DEBUG nova.compute.manager [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1086.717024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.717916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59545f87-3fed-40c1-931d-96d1336e9945 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.726382] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.726691] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e94e3e3-dc41-473f-b61d-83dff0e21c1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.737299] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1086.737299] env[61545]: value = "task-4256477" [ 1086.737299] env[61545]: _type = "Task" [ 1086.737299] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.748944] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256477, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.859762] env[61545]: DEBUG nova.scheduler.client.report [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.900199] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.572s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.170949] env[61545]: DEBUG nova.network.neutron [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "281269ec-aad9-4fd1-bf3e-ba8f1bc9923f", "address": "fa:16:3e:e1:69:94", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap281269ec-aa", "ovs_interfaceid": "281269ec-aad9-4fd1-bf3e-ba8f1bc9923f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.199122] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.251348] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256477, 'name': PowerOffVM_Task, 'duration_secs': 0.331271} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.251637] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.251841] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.252131] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8367a422-d7fb-4a26-ae22-1bfb53f35c9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.312274] env[61545]: DEBUG nova.compute.manager [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-changed-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1087.312385] env[61545]: DEBUG nova.compute.manager [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing instance network info cache due to event network-changed-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1087.312645] env[61545]: DEBUG oslo_concurrency.lockutils [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.334055] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.334488] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.334782] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleting the datastore file [datastore2] db2d0e21-f6bb-4f61-8d54-e9191de13a59 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.335099] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61c08dee-7355-469c-ab25-aa9786d16504 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.344812] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for the task: (returnval){ [ 1087.344812] env[61545]: value = "task-4256479" [ 1087.344812] env[61545]: _type = "Task" [ 1087.344812] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.357104] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.365164] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.367747] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.654s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.369576] env[61545]: INFO nova.compute.claims [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.395451] env[61545]: INFO nova.scheduler.client.report [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Deleted allocations for instance 7301c541-664f-43ec-8a34-86f38cac22ab [ 1087.408367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.463941] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.495125] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.495432] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.495710] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.495834] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.495997] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.496209] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.496391] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.496563] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.496747] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.496974] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.497187] env[61545]: DEBUG nova.virt.hardware [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.498202] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0df0422-8f24-4e79-a0ca-f8eebe8dbd15 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.509741] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42418a0a-94d4-47fe-8dff-22fb85762af2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.673815] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.674514] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.674674] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.674973] env[61545]: DEBUG oslo_concurrency.lockutils [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.675187] env[61545]: DEBUG nova.network.neutron [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Refreshing network info cache for port 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1087.677086] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94c87a0-d1d1-475c-9eff-dc98114ff093 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.700317] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.700669] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.700835] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.701034] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.701186] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.701335] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.701543] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.701702] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.701868] env[61545]: DEBUG nova.virt.hardware [None 
req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.702062] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.702259] env[61545]: DEBUG nova.virt.hardware [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.708856] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfiguring VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1087.710159] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bfadd2c-de8e-46c9-ac87-edc1fbdee6a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.728032] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.737047] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1087.737047] env[61545]: value = "task-4256480" [ 1087.737047] env[61545]: _type = "Task" [ 1087.737047] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.749029] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256480, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.857982] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.904319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6120f30b-d328-4359-a43c-826189a8dac1 tempest-ImagesOneServerTestJSON-1832733047 tempest-ImagesOneServerTestJSON-1832733047-project-member] Lock "7301c541-664f-43ec-8a34-86f38cac22ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.079s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.196523] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.250908] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.358538] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.507320] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.507652] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.507875] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.508125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.508859] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.513394] env[61545]: INFO nova.compute.manager [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Terminating instance [ 1088.577433] env[61545]: DEBUG nova.network.neutron [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updated VIF entry in instance network info cache for port 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.578077] env[61545]: DEBUG nova.network.neutron [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "281269ec-aad9-4fd1-bf3e-ba8f1bc9923f", "address": "fa:16:3e:e1:69:94", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap281269ec-aa", "ovs_interfaceid": "281269ec-aad9-4fd1-bf3e-ba8f1bc9923f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.687322] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8555313d-b91a-4f3e-815d-2f74232b89e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.701841] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.704446] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16e1252-be08-4eb7-a8d6-c51ba5f80851 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.753306] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e99dfd-cf64-4511-97fd-b6d059bdbdd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.768916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad67a589-6158-4cca-900f-0a3b18958ce9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.775083] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.789549] env[61545]: DEBUG nova.compute.provider_tree [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.854525] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Successfully updated port: 208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.867304] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.018187] env[61545]: DEBUG nova.compute.manager [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.018576] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.019443] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22087b73-eadd-49d1-91d9-262c4506a09f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.034023] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.034023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85023e25-7ded-48a0-b4da-b5a860c7cd47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.081040] env[61545]: DEBUG oslo_concurrency.lockutils [req-cce44029-636d-4b22-8639-f974824d3ea6 req-e6839057-5331-4feb-bc52-b1b88b5ed0ee service nova] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.154224] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.154571] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.154734] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] 2f8567b1-7291-4705-8ef3-23547eb4860e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.155148] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-794e9290-8a8f-434d-868f-03c8ebf21a1a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.167914] env[61545]: DEBUG oslo_vmware.api [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1089.167914] env[61545]: value = "task-4256482" [ 1089.167914] env[61545]: _type = "Task" [ 1089.167914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.181196] env[61545]: DEBUG oslo_vmware.api [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.200030] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.258280] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256480, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.292524] env[61545]: DEBUG nova.scheduler.client.report [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.349214] env[61545]: DEBUG nova.compute.manager [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Received event network-vif-plugged-208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1089.349782] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Acquiring lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.349900] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.350143] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 
req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.350697] env[61545]: DEBUG nova.compute.manager [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] No waiting events found dispatching network-vif-plugged-208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.350976] env[61545]: WARNING nova.compute.manager [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Received unexpected event network-vif-plugged-208c9c7b-7408-4bea-a6d1-34d97f61dabf for instance with vm_state building and task_state spawning. [ 1089.351218] env[61545]: DEBUG nova.compute.manager [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Received event network-changed-208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1089.351432] env[61545]: DEBUG nova.compute.manager [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Refreshing instance network info cache due to event network-changed-208c9c7b-7408-4bea-a6d1-34d97f61dabf. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1089.351732] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Acquiring lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.351897] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Acquired lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.352558] env[61545]: DEBUG nova.network.neutron [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Refreshing network info cache for port 208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.361307] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.366592] env[61545]: DEBUG oslo_vmware.api [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Task: {'id': task-4256479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.896592} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.367106] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.367466] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.367824] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.368166] env[61545]: INFO nova.compute.manager [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Took 2.65 seconds to destroy the instance on the hypervisor. [ 1089.368572] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.369316] env[61545]: DEBUG nova.compute.manager [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.369567] env[61545]: DEBUG nova.network.neutron [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1089.679912] env[61545]: DEBUG oslo_vmware.api [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241803} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.680261] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.681589] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.681589] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.681589] env[61545]: INFO nova.compute.manager [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1089.681589] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.681589] env[61545]: DEBUG nova.compute.manager [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.681589] env[61545]: DEBUG nova.network.neutron [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1089.697553] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256476, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.631031} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.699384] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024/OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024.vmdk to [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk. 
[ 1089.699752] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Cleaning up location [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1089.700049] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3d81129e-ef5a-46a0-92c7-690fdda40024 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.700434] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a31961c1-3564-4afe-97f2-f5c3d196a705 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.707430] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1089.707430] env[61545]: value = "task-4256483" [ 1089.707430] env[61545]: _type = "Task" [ 1089.707430] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.717798] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.759305] env[61545]: DEBUG oslo_vmware.api [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256480, 'name': ReconfigVM_Task, 'duration_secs': 1.624064} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.760350] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.760350] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfigured VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1089.801017] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.801017] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1089.804529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.281s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.805170] env[61545]: DEBUG nova.objects.instance [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lazy-loading 'resources' on Instance uuid 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.919817] env[61545]: DEBUG nova.network.neutron [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.052016] env[61545]: DEBUG nova.network.neutron [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.203406] env[61545]: DEBUG nova.network.neutron [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.223157] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035309} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.223566] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.223835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.224231] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk to [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.224603] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d20734b-a2f4-4a26-ab03-530bab010c31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.235546] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1090.235546] env[61545]: value = "task-4256484" [ 1090.235546] env[61545]: _type = "Task" [ 1090.235546] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.248710] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.265365] env[61545]: DEBUG oslo_concurrency.lockutils [None req-336bb88d-8148-4ee5-8e0f-7fc987621b38 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.695s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.307065] env[61545]: DEBUG nova.compute.utils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.308703] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.308951] env[61545]: DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.367747] env[61545]: DEBUG nova.policy [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a70fb8ea2d498688688f7e51cf4bac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45efa52890714522b3058b7144b42a89', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.454505] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "e21de424-8121-4e2f-84c2-8096ba8048cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.454999] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.455617] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock 
"e21de424-8121-4e2f-84c2-8096ba8048cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.456036] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.456745] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.462249] env[61545]: INFO nova.compute.manager [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Terminating instance [ 1090.559833] env[61545]: DEBUG oslo_concurrency.lockutils [req-cb82c605-3c6a-482f-995a-29d17a6f5af9 req-1fdbd964-45da-4617-87e6-95a2bb5ff5aa service nova] Releasing lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.559833] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquired lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.559833] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.567636] env[61545]: DEBUG nova.network.neutron [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.649306] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b564e1ee-5496-49df-98ea-dcfaee21d021 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.659699] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d5ae0b-0eaa-4884-8ca7-cea47f32c06c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.701411] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cdf32611-5752-4296-8424-2435d031744e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.706014] env[61545]: INFO nova.compute.manager [-] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Took 1.34 seconds to deallocate network for instance. [ 1090.717468] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5ff2a8-290c-4d12-921f-83faed8a140f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.738061] env[61545]: DEBUG nova.compute.provider_tree [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.754654] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.816533] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1090.918640] env[61545]: DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Successfully created port: b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.969488] env[61545]: DEBUG nova.compute.manager [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.969809] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.971180] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf24e204-6847-49c7-a92b-23196008800c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.983542] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.983939] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73377394-031f-4c62-8438-4ef28ca758d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.993721] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1090.993721] env[61545]: value = "task-4256485" [ 1090.993721] env[61545]: _type = "Task" [ 1090.993721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.008704] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256485, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.074455] env[61545]: INFO nova.compute.manager [-] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Took 1.39 seconds to deallocate network for instance. [ 1091.157041] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.224627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.246458] env[61545]: DEBUG nova.scheduler.client.report [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.261304] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.508143] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256485, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.533447] env[61545]: DEBUG nova.compute.manager [req-3809450b-a526-4aa6-9a84-373c1dd119ec req-2a9386c9-002a-41d8-aad3-80b182e13f75 service nova] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Received event network-vif-deleted-56c23893-2f0c-48e2-ad52-6dcdf0869d98 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1091.533578] env[61545]: DEBUG nova.compute.manager [req-3809450b-a526-4aa6-9a84-373c1dd119ec req-2a9386c9-002a-41d8-aad3-80b182e13f75 service nova] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Received event network-vif-deleted-5a70c23b-fb28-4302-add1-42b019c5c4f7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1091.586418] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.752906] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.948s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.760107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.191s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.760437] env[61545]: DEBUG nova.objects.instance [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'resources' on Instance uuid b1277c3b-cd7b-43be-9eff-640145dde5e5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.765021] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.792188] env[61545]: INFO nova.scheduler.client.report [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Deleted allocations for instance 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc [ 1091.816406] env[61545]: DEBUG nova.network.neutron [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Updating instance_info_cache with network_info: [{"id": "208c9c7b-7408-4bea-a6d1-34d97f61dabf", "address": "fa:16:3e:11:67:21", "network": {"id": "96899ef0-6ac2-4783-a983-dab99c80a97e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1878499646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7140d79e25954245bd4224e7fd4fa791", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap208c9c7b-74", "ovs_interfaceid": "208c9c7b-7408-4bea-a6d1-34d97f61dabf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.832278] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1091.881064] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1091.881394] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.881566] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1091.881752] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.881898] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1091.882464] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1091.882464] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1091.882620] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1091.883165] env[61545]: DEBUG 
nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1091.883165] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1091.883309] env[61545]: DEBUG nova.virt.hardware [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1091.884227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09be9ef2-4915-4bdf-befd-f00b30b19b56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.896755] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156ed998-5bfc-4882-a261-154c06424e5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.011188] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256485, 'name': PowerOffVM_Task, 'duration_secs': 0.626026} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.011579] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.011643] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.012057] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1a1a627-1bab-4f7c-8bb3-d579329ca284 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.117529] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.117955] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.118024] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleting the datastore file [datastore2] e21de424-8121-4e2f-84c2-8096ba8048cc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.118328] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8decaf17-e76f-4766-96db-8247b797a523 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.129458] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1092.129458] env[61545]: value = "task-4256487" [ 1092.129458] env[61545]: _type = "Task" [ 1092.129458] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.140685] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.254076] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.303814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-326ffa23-3dd6-4e06-bfbf-3fdf60f04ba6 tempest-ServersAdminTestJSON-1231961305 tempest-ServersAdminTestJSON-1231961305-project-member] Lock "7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.254s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.322024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Releasing lock "refresh_cache-6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.322024] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance network_info: |[{"id": "208c9c7b-7408-4bea-a6d1-34d97f61dabf", "address": "fa:16:3e:11:67:21", "network": {"id": "96899ef0-6ac2-4783-a983-dab99c80a97e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1878499646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7140d79e25954245bd4224e7fd4fa791", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap208c9c7b-74", "ovs_interfaceid": "208c9c7b-7408-4bea-a6d1-34d97f61dabf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.322024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:67:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '208c9c7b-7408-4bea-a6d1-34d97f61dabf', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.330992] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Creating folder: Project (7140d79e25954245bd4224e7fd4fa791). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.332781] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06183f2d-3134-4f84-b96a-ca02ed1b280f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.358207] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Created folder: Project (7140d79e25954245bd4224e7fd4fa791) in parent group-v838542. [ 1092.358207] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Creating folder: Instances. Parent ref: group-v838797. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.358207] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84c89591-9546-4071-baab-42f4eb793bda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.378043] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Created folder: Instances in parent group-v838797. [ 1092.378364] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.379068] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.379319] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54884bd1-05d1-4b9e-b6bd-17bcc73bb40b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.407686] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.407686] env[61545]: value = "task-4256490" [ 1092.407686] env[61545]: _type = "Task" [ 1092.407686] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.420657] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256490, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.616728] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f65a448-85b1-496d-8f33-695cfb62b0e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.628926] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f991d7ea-14bb-464f-b53d-1bd528bb348b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.643585] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.678523] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66ff75c-cc2a-4336-a123-7fe8166e9ac3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.691541] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640333da-6c4f-447a-9e1a-1d1d3c45e3b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.717928] env[61545]: DEBUG nova.compute.provider_tree [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.757678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.757678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.758075] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.924798] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256490, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.142973] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.222367] env[61545]: DEBUG nova.scheduler.client.report [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.251795] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256484, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.638907} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.252118] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7/623bd67d-a8b3-4e1b-9169-f7cd1bc393b7.vmdk to [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.253232] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2a05fd-b69c-4e30-902d-cd8c61a30ab8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.267847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.268068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.277148] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 
f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.278029] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7472b25-2887-4215-9abb-e09dbf9637ee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.280973] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffac9f4a-2f11-4218-abed-e4484351dc7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.311692] env[61545]: DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Successfully updated port: b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.314528] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d05305-7e2f-4ce5-acaa-98fc7b7618be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.318913] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1093.318913] env[61545]: value = "task-4256491" [ 1093.318913] env[61545]: _type = "Task" [ 1093.318913] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.344213] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfiguring VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1093.345436] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6e2ba7-904e-4a8b-8c55-6dd4f84a41a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.363091] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.368550] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1093.368550] env[61545]: value = "task-4256492" [ 1093.368550] env[61545]: _type = "Task" [ 1093.368550] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.377549] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.422642] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256490, 'name': CreateVM_Task, 'duration_secs': 0.665104} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.422829] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.423630] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.423802] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.425237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1093.425237] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00e41b37-9e54-488d-b18f-9fead094aeda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.429765] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1093.429765] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52972756-ce66-a670-627e-3f3429306e24" [ 1093.429765] env[61545]: _type = "Task" [ 1093.429765] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.440297] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52972756-ce66-a670-627e-3f3429306e24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.579705] env[61545]: DEBUG nova.compute.manager [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Received event network-vif-plugged-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1093.580031] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.580334] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.580552] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.580813] env[61545]: DEBUG nova.compute.manager [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] No waiting events found dispatching network-vif-plugged-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1093.580911] env[61545]: WARNING nova.compute.manager [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Received unexpected event network-vif-plugged-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc for instance with vm_state building and task_state spawning. [ 1093.581219] env[61545]: DEBUG nova.compute.manager [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Received event network-changed-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1093.581372] env[61545]: DEBUG nova.compute.manager [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Refreshing instance network info cache due to event network-changed-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1093.581508] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.581648] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.581811] env[61545]: DEBUG nova.network.neutron [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Refreshing network info cache for port b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.644186] env[61545]: DEBUG oslo_vmware.api [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.183984} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.644471] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.644660] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.644851] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.645047] env[61545]: INFO nova.compute.manager [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Took 2.68 seconds to destroy the instance on the hypervisor. [ 1093.645301] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1093.645497] env[61545]: DEBUG nova.compute.manager [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1093.645590] env[61545]: DEBUG nova.network.neutron [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.728202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.731730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.884s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.733079] env[61545]: INFO nova.compute.claims [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.775870] env[61545]: INFO nova.scheduler.client.report [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted allocations for instance b1277c3b-cd7b-43be-9eff-640145dde5e5 [ 1093.819602] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.835792] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256491, 'name': ReconfigVM_Task, 'duration_secs': 0.315112} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.836237] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Reconfigured VM instance instance-00000040 to attach disk [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553/f9c9c447-e676-4143-b329-fb6d71bcd553.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.837420] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ada0ad1-4d42-4d5f-83c4-8f44b559d0ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.845245] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1093.845245] env[61545]: value = "task-4256493" [ 1093.845245] env[61545]: _type = "Task" [ 1093.845245] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.855955] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256493, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.880033] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.940538] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52972756-ce66-a670-627e-3f3429306e24, 'name': SearchDatastore_Task, 'duration_secs': 0.010683} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.940768] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.940923] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.941162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.941315] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.941499] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.941772] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c1b4cc6-ba6b-4bac-abf6-95d18eceb929 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.951829] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.952291] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.952810] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f86f4b8e-8f8e-4b5d-adea-c0d09a212c08 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.960551] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1093.960551] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abd43e-f245-085e-8532-be1188d98821" [ 1093.960551] env[61545]: _type = "Task" [ 1093.960551] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.970537] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abd43e-f245-085e-8532-be1188d98821, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.150260] env[61545]: DEBUG nova.network.neutron [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.291294] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08679ac6-0829-40fd-81a7-fd0f8caf6bd2 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "b1277c3b-cd7b-43be-9eff-640145dde5e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.323s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.295627] env[61545]: DEBUG nova.network.neutron [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.358474] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256493, 'name': Rename_Task, 'duration_secs': 0.14564} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.358950] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.359344] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd652195-dffa-473b-bcc0-d5f99e7e47f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.369481] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1094.369481] env[61545]: value = "task-4256494" [ 1094.369481] env[61545]: _type = "Task" [ 1094.369481] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.389759] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.390302] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.471978] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abd43e-f245-085e-8532-be1188d98821, 'name': SearchDatastore_Task, 'duration_secs': 0.011514} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.472826] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32bef564-1def-43e6-a697-14b8f97b103e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.478884] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1094.478884] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8329a-c82b-234e-102e-106488fb3ba9" [ 1094.478884] env[61545]: _type = "Task" [ 1094.478884] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.491023] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8329a-c82b-234e-102e-106488fb3ba9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.798664] env[61545]: DEBUG oslo_concurrency.lockutils [req-252dc62d-674d-4393-b07b-158bdc3c9582 req-9e066858-cd24-4535-b491-fe84bc302202 service nova] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.803031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.803031] env[61545]: DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.888633] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256494, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.892698] env[61545]: DEBUG nova.network.neutron [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.894025] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.993046] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8329a-c82b-234e-102e-106488fb3ba9, 'name': SearchDatastore_Task, 'duration_secs': 0.009641} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.993394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.993664] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a/6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.993943] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c671a9ce-f9c4-4074-8e16-92acfd146d6f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.999543] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06b8658-de69-4671-8ff1-bc0f38a4dd16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.005516] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1095.005516] env[61545]: value = "task-4256495" [ 1095.005516] env[61545]: _type = "Task" [ 1095.005516] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.012693] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edeaf7ba-cba5-444b-90a9-385c00a38881 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.019492] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.049720] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853f0530-e564-46a8-b840-e804bc04e531 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.059122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befa5ee9-fb0e-4f4a-9c6c-78ea63b2d9d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.074700] env[61545]: DEBUG nova.compute.provider_tree [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.165752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.166153] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.166259] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.166498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.166943] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.169745] env[61545]: INFO nova.compute.manager [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Terminating instance [ 1095.377923] env[61545]: 
DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1095.387966] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256494, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.397025] env[61545]: INFO nova.compute.manager [-] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Took 1.75 seconds to deallocate network for instance. [ 1095.397368] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.520272] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256495, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.580538] env[61545]: DEBUG nova.scheduler.client.report [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.600603] env[61545]: DEBUG nova.network.neutron [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": 
"nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.621012] env[61545]: DEBUG nova.compute.manager [req-4894f8a8-47bb-4c8b-97d1-40bb91741ff5 req-2795d2c5-f267-40fb-9aa3-4d3ae3628461 service nova] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Received event network-vif-deleted-bb1a376a-94fe-4856-ab21-de3f98dda10e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1095.674364] env[61545]: DEBUG nova.compute.manager [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.674593] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.675521] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e8a4fb-af45-4834-8f19-bfb920cc9796 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.684913] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.685208] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14eacc4a-7c23-4523-a0b1-9c767922990b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.693497] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1095.693497] env[61545]: value = "task-4256496" [ 1095.693497] env[61545]: _type = "Task" [ 1095.693497] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.706324] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.882036] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256494, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.900071] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.910356] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.018849] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256495, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527025} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.018849] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a/6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1096.018849] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1096.018849] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-508d7224-f0f5-4432-8c71-e00da530c897 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.028591] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1096.028591] env[61545]: value = "task-4256497" [ 1096.028591] env[61545]: _type = "Task" [ 1096.028591] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.037795] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256497, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.084899] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.085550] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1096.088622] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.135s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.090219] env[61545]: INFO nova.compute.claims [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.103552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.103924] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Instance network_info: |[{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1096.104447] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:19:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1096.112569] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.112796] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1096.113044] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac81d21a-c97a-4126-a848-b7568295340a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.136782] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1096.136782] env[61545]: value = "task-4256498" [ 1096.136782] env[61545]: _type = "Task" [ 1096.136782] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.146385] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256498, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.205963] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256496, 'name': PowerOffVM_Task, 'duration_secs': 0.259948} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.206366] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.206542] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.206918] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c22bea76-1c6d-4500-b48f-eea01fb51db6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.382867] env[61545]: DEBUG oslo_vmware.api [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256494, 'name': PowerOnVM_Task, 'duration_secs': 1.761504} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.386692] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1096.397654] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.535129] env[61545]: DEBUG nova.compute.manager [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1096.536063] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdc6237-fa0c-4671-a11f-0dc6b77584d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.545880] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082751} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.548202] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.551919] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725a2cc3-e392-4272-abe8-ce03739fc8bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.575524] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a/6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.576159] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06456b16-8425-4900-9168-02b97afc3b18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.596708] env[61545]: DEBUG nova.compute.utils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1096.600270] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1096.600473] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1096.606671] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1096.606671] env[61545]: value = "task-4256500" [ 1096.606671] env[61545]: _type = "Task" [ 1096.606671] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.622557] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256500, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.626174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.626174] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.626174] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Deleting the datastore file [datastore2] 91eeceeb-c11e-414b-8ae6-e68e927f1f1e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.626174] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dfc5de6-94ed-4ee8-90c9-b60b79de32be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.634224] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for the task: (returnval){ [ 1096.634224] env[61545]: value = "task-4256501" [ 1096.634224] env[61545]: _type = "Task" [ 1096.634224] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.650968] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.654371] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256498, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.693987] env[61545]: DEBUG nova.policy [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1096.895799] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.069248] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b3555fb2-0dcd-4597-98a0-940f7a11b8e3 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.761s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.111718] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1097.125627] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.151363] env[61545]: DEBUG oslo_vmware.api [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Task: {'id': task-4256501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222613} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.151363] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.151363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.151363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.151363] env[61545]: INFO nova.compute.manager [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1097.151363] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.151363] env[61545]: DEBUG nova.compute.manager [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.151363] env[61545]: DEBUG nova.network.neutron [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.156125] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256498, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.357028] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cea3b8c-0e16-4d23-95f2-4816b21672c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.365690] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26582d42-0cd0-4757-ad22-810e86e306fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.404834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ca9c0f-604f-41f2-aa78-b97b3c87776d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.414561] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.419831] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0162126b-6998-4602-8a76-b398ac170496 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.435697] env[61545]: DEBUG nova.compute.provider_tree [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1097.478016] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Successfully created port: 169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1097.632512] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256500, 'name': ReconfigVM_Task, 'duration_secs': 0.750192} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.632789] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a/6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1097.634827] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea4ffaa6-2446-448c-be5a-c176f854856b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.646914] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1097.646914] env[61545]: value = "task-4256502" [ 1097.646914] env[61545]: _type = "Task" [ 1097.646914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.659602] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256498, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.663268] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256502, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.755574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.756142] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.914494] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.958491] env[61545]: ERROR nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [req-e3800842-cca7-4dbe-b775-0c58ec037b9e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e3800842-cca7-4dbe-b775-0c58ec037b9e"}]} [ 1097.979676] env[61545]: DEBUG nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1097.994891] env[61545]: DEBUG nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1097.995160] env[61545]: DEBUG nova.compute.provider_tree [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1098.009805] env[61545]: DEBUG nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1098.034604] env[61545]: DEBUG nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1098.131369] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.150545] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256498, 'name': CreateVM_Task, 'duration_secs': 1.829088} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.159468] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.160990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.161331] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.161823] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.165743] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e81d0fff-3153-4612-ad22-a65fd2229fcf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.171886] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256502, 'name': Rename_Task, 'duration_secs': 0.190821} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.174883] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.177683] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.177827] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.177953] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.178147] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.178289] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.178428] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.178632] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1098.178804] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.178990] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.179168] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.179338] env[61545]: DEBUG nova.virt.hardware [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.180963] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52baa061-6c07-40f4-9547-ea7e48c7269b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.183722] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea75032-ea35-4a64-8f58-24231ae9716b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.187105] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1098.187105] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526c14e6-c84c-11f9-4eb6-42c95649ad99" [ 1098.187105] env[61545]: _type = "Task" [ 1098.187105] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.200386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16941f92-23a8-4379-970a-04f9ed9df4be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.204879] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1098.204879] env[61545]: value = "task-4256503" [ 1098.204879] env[61545]: _type = "Task" [ 1098.204879] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.208441] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526c14e6-c84c-11f9-4eb6-42c95649ad99, 'name': SearchDatastore_Task, 'duration_secs': 0.010893} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.213407] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.213732] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.213991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.214196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.214373] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.227886] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21f37584-4910-43aa-b1cd-22d725c1556f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.237252] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256503, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.239481] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.239759] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.241121] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2fbe58f-7218-4fb7-b00c-da87ed490965 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.251365] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1098.251365] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52150d83-b3fd-c38b-3277-bec6ae82d787" [ 1098.251365] env[61545]: _type = "Task" [ 1098.251365] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.261129] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1098.267669] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52150d83-b3fd-c38b-3277-bec6ae82d787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.366259] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78df38b-16ef-4eba-9333-aaf4a87d78a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.375527] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1223c9b3-28a1-4b35-9919-f11a6723563b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.415961] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4d350a-37bf-4fed-af42-e55c61d66bda {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.429398] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.430714] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c33d55-5300-4a25-b2d9-86e3d4a4938f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.447530] env[61545]: DEBUG nova.compute.provider_tree [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1098.557469] env[61545]: DEBUG nova.compute.manager [req-7267a91c-b3d1-47ec-a9a5-c124abe43b60 req-4ad492a4-6415-405c-ab73-e2f44290b551 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Received event network-vif-deleted-5f4a8e1e-aa1b-4785-8467-54496ef2257b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1098.557469] env[61545]: INFO nova.compute.manager [req-7267a91c-b3d1-47ec-a9a5-c124abe43b60 req-4ad492a4-6415-405c-ab73-e2f44290b551 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Neutron deleted interface 5f4a8e1e-aa1b-4785-8467-54496ef2257b; detaching it from the instance and deleting it from the info cache [ 1098.557469] env[61545]: DEBUG nova.network.neutron [req-7267a91c-b3d1-47ec-a9a5-c124abe43b60 req-4ad492a4-6415-405c-ab73-e2f44290b551 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.724052] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256503, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.769115] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52150d83-b3fd-c38b-3277-bec6ae82d787, 'name': SearchDatastore_Task, 'duration_secs': 0.013434} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.776310] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f18159c-9f4d-4a3b-bbcd-22fb7940b1f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.785168] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1098.785168] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec35cb-e96d-65de-2254-3bc54377dfa6" [ 1098.785168] env[61545]: _type = "Task" [ 1098.785168] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.795747] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.800777] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec35cb-e96d-65de-2254-3bc54377dfa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.926804] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.970957] env[61545]: DEBUG nova.network.neutron [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.999334] env[61545]: DEBUG nova.scheduler.client.report [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1098.999683] env[61545]: DEBUG nova.compute.provider_tree [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 123 to 124 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1098.999806] env[61545]: DEBUG nova.compute.provider_tree [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1099.058965] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f7a5ada-6a4b-4c1b-90d5-6697906318e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.071554] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bf599b-9cef-46b6-8d24-8e069191e55f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.107825] env[61545]: DEBUG nova.compute.manager [req-7267a91c-b3d1-47ec-a9a5-c124abe43b60 req-4ad492a4-6415-405c-ab73-e2f44290b551 service nova] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Detach interface failed, port_id=5f4a8e1e-aa1b-4785-8467-54496ef2257b, reason: Instance 91eeceeb-c11e-414b-8ae6-e68e927f1f1e could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1099.220385] env[61545]: DEBUG oslo_vmware.api [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256503, 'name': PowerOnVM_Task, 'duration_secs': 0.677747} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.220690] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.220901] env[61545]: INFO nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Took 11.76 seconds to spawn the instance on the hypervisor. [ 1099.221165] env[61545]: DEBUG nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1099.222082] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4eb8fa-cc29-4706-acb5-cdf9c73adadb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.299552] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec35cb-e96d-65de-2254-3bc54377dfa6, 'name': SearchDatastore_Task, 'duration_secs': 0.013251} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.299981] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.300341] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1099.301117] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2d28236-89b2-499c-b6cc-49c3b8f2e8f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.309996] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1099.309996] env[61545]: value = "task-4256504" [ 1099.309996] env[61545]: _type = "Task" [ 1099.309996] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.320105] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.425024] env[61545]: DEBUG oslo_vmware.api [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256492, 'name': ReconfigVM_Task, 'duration_secs': 5.812882} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.425024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.425024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Reconfigured VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1099.480448] env[61545]: INFO nova.compute.manager [-] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Took 2.32 seconds to deallocate network for instance. 
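The records above show the recurring oslo.vmware pattern on this compute node: a vCenter task is started (PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ReconfigVM_Task), then the caller blocks in wait_for_task (api.py:397), which polls progress via _poll_task (api.py:434) until the task completes (api.py:444). The following is a minimal, illustrative sketch of that invoke-then-poll pattern using the public oslo.vmware API; the vCenter host, credentials, and VM reference are placeholders, not values taken from this log.

```python
# Illustrative sketch of the invoke-then-poll pattern seen in the log above.
# Host, credentials and the VM managed-object reference are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',         # placeholder vCenter host
    'administrator@vsphere',   # placeholder username
    'secret',                  # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,    # seconds between _poll_task polls
)

def power_on(vm_ref):
    """Start a VM and block until vCenter reports the task has finished."""
    # Corresponds to "Invoking VirtualMachine.PowerOnVM_Task" in the log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Corresponds to "Waiting for the task ... to complete" and the repeated
    # "progress is N%" lines: wait_for_task polls until success or raises
    # on task error / timeout.
    return session.wait_for_task(task)
```

The same loop backs every "Task: {'id': task-..., 'name': ...} completed successfully" line in this log; only the invoked vSphere method differs.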
[ 1099.493301] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586dec78-84f2-44f7-b104-6b85d0918394 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.501811] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Suspending the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1099.503021] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0f276c65-411f-4e7a-b1a5-311e68084741 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.505080] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.416s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.505832] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.509275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.624s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.514023] env[61545]: INFO nova.compute.claims [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.522245] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1099.522245] env[61545]: value = "task-4256505" [ 1099.522245] env[61545]: _type = "Task" [ 1099.522245] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.536049] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256505, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.750929] env[61545]: INFO nova.compute.manager [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Took 23.74 seconds to build instance. [ 1099.822456] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256504, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.858403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.858967] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.859358] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.860532] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.863013] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.865090] env[61545]: INFO nova.compute.manager [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Terminating instance [ 1099.993750] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.019668] env[61545]: DEBUG nova.compute.utils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1100.024015] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1100.024635] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1100.042881] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256505, 'name': SuspendVM_Task} progress is 79%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.159979] env[61545]: DEBUG nova.policy [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f01e33a652314f70a08ae1a8087a54cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f4ccb92c3be47f18fd65a22a5a1ad94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1100.253986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dae91d74-a042-4051-bcd6-48b591c36dcd tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.261s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.324411] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58265} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.324660] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1100.324892] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1100.326233] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fae0b66a-304c-413d-84ee-1a4bc7347dca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.334430] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1100.334430] env[61545]: value = "task-4256506" [ 1100.334430] env[61545]: _type = "Task" [ 1100.334430] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.347878] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256506, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.370512] env[61545]: DEBUG nova.compute.manager [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1100.370656] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1100.371658] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d57496-e154-4fc9-9631-c4b9a9aa1d27 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.375656] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Successfully updated port: 169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1100.387683] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.388148] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b10a4d1-9542-472d-b10a-bcdced05ddfd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.400997] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1100.400997] env[61545]: value = "task-4256507" [ 1100.400997] env[61545]: _type = "Task" [ 1100.400997] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.425170] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.524438] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.544875] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256505, 'name': SuspendVM_Task} progress is 79%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.831856] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1182c4-11b0-4276-8956-e5f0c4f91c54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.849194] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256506, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222649} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.849194] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.849194] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da3e6e7-be69-44b1-9240-422e3cdb15ee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.859194] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415501fa-605f-428c-a454-95538253fe5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.884381] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.911173] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.914018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.914018] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1100.914018] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f4b4303-1ad4-476e-b802-d0dbbdeaa2ee {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.935074] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3f29f7-c791-4a1f-90c0-8beb5560daba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.943815] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256507, 'name': PowerOffVM_Task, 'duration_secs': 0.237924} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.947036] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.947259] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1100.947894] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1100.947894] env[61545]: value = "task-4256508" [ 1100.947894] env[61545]: _type = "Task" [ 1100.947894] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.948185] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61ca7e26-8e11-439f-b0f7-e6de0f300303 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.951387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbaa79a-c415-4647-b23c-ced2a80e9fe7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.966889] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256508, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.975189] env[61545]: DEBUG nova.compute.provider_tree [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1101.043420] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.043758] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.044214] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleting the datastore file [datastore2] c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.045041] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56ac30d8-5773-4d8c-9f19-48bd51b6f7ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.053637] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256505, 'name': SuspendVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.060639] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1101.060639] env[61545]: value = "task-4256510" [ 1101.060639] env[61545]: _type = "Task" [ 1101.060639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.073421] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256510, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.152309] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Successfully created port: 6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1101.187631] env[61545]: DEBUG nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Received event network-vif-plugged-169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1101.187911] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Acquiring lock "79762f13-2f93-43ba-883b-9437c7732c04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.188185] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Lock "79762f13-2f93-43ba-883b-9437c7732c04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.188392] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Lock "79762f13-2f93-43ba-883b-9437c7732c04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.188611] env[61545]: DEBUG nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] No waiting events found dispatching network-vif-plugged-169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1101.188845] env[61545]: WARNING nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Received unexpected event network-vif-plugged-169863f3-63da-4e6e-8dbb-514951796bf8 for instance with vm_state building and task_state spawning. 
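Nearly every "Acquiring lock ... by ...", "acquired ... :: waited N s" and '"released" ... :: held N s' triple in this log is emitted by oslo_concurrency.lockutils, which wraps a critical section and records how long the caller waited for and then held the lock. Below is a hedged sketch of the two usual forms (decorator and context manager); the lock names and function bodies are illustrative placeholders, not Nova's actual implementation.

```python
# Sketch of the lockutils usage that produces the Acquiring / acquired /
# "released" triples in the log. Lock names are illustrative placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance):
    # Runs with the 'compute_resources' lock held, mirroring the
    # ResourceTracker.instance_claim lines above.
    ...

def clear_events(instance_uuid):
    # Context-manager form, as used around per-instance event bookkeeping
    # (e.g. the "<uuid>-events" locks acquired and released in the log).
    with lockutils.lock('%s-events' % instance_uuid):
        ...
```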
[ 1101.189144] env[61545]: DEBUG nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-vif-deleted-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1101.189290] env[61545]: INFO nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Neutron deleted interface 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f; detaching it from the instance and deleting it from the info cache [ 1101.189584] env[61545]: DEBUG nova.network.neutron [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.217118] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.217452] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.221202] env[61545]: DEBUG nova.network.neutron [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.464927] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256508, 'name': ReconfigVM_Task, 'duration_secs': 0.340447} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.465902] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.467029] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fef68c8-20b2-4fc4-bdbc-7103c5ce07e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.474497] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1101.474497] env[61545]: value = "task-4256511" [ 1101.474497] env[61545]: _type = "Task" [ 1101.474497] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.490170] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256511, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.504728] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1101.526201] env[61545]: DEBUG nova.scheduler.client.report [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1101.526486] env[61545]: DEBUG nova.compute.provider_tree [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 124 to 125 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1101.526666] env[61545]: DEBUG nova.compute.provider_tree [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1101.545258] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.547576] env[61545]: DEBUG oslo_vmware.api [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256505, 'name': SuspendVM_Task, 'duration_secs': 1.634303} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.550504] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Suspended the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1101.550704] env[61545]: DEBUG nova.compute.manager [None req-a2dc13a2-c56f-42d9-9e57-a1cfda55e9cc tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.552306] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9353e670-f54e-460c-b0db-fc6a3c20affb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.579691] env[61545]: DEBUG oslo_vmware.api [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205026} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.580080] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1101.580317] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1101.580525] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1101.581217] env[61545]: INFO nova.compute.manager [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1101.582132] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1101.582132] env[61545]: DEBUG nova.compute.manager [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1101.582132] env[61545]: DEBUG nova.network.neutron [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1101.598947] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.599210] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.599372] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.599554] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.599699] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.599946] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.600280] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.600427] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.600565] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.600697] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.600832] env[61545]: DEBUG nova.virt.hardware [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.602657] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab62acd3-912f-47d3-baac-ec4c258df1d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.614878] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ba6d76-0052-458a-afc5-f3a51501d563 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.693746] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.694014] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Acquired lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.695791] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1b7e94-c98f-4af9-b9f0-8da0bc5c62e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.716560] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Releasing lock "97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.716909] env[61545]: WARNING nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] 
[instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Detach interface failed, port_id=281269ec-aad9-4fd1-bf3e-ba8f1bc9923f, reason: No device with interface-id 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f exists on VM: nova.exception.NotFound: No device with interface-id 281269ec-aad9-4fd1-bf3e-ba8f1bc9923f exists on VM [ 1101.717199] env[61545]: DEBUG nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Received event network-changed-169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1101.717342] env[61545]: DEBUG nova.compute.manager [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Refreshing instance network info cache due to event network-changed-169863f3-63da-4e6e-8dbb-514951796bf8. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1101.717622] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Acquiring lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.759124] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.759442] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.759743] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.760074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.760334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock 
"6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.763763] env[61545]: INFO nova.compute.manager [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Terminating instance [ 1101.985351] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256511, 'name': Rename_Task, 'duration_secs': 0.227603} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.985639] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1101.985899] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a25fa39-0cf8-407d-8a4e-19265c58a141 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.993568] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1101.993568] env[61545]: value = "task-4256512" [ 1101.993568] env[61545]: _type = "Task" [ 1101.993568] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.003476] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.032580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.033357] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1102.037131] env[61545]: DEBUG nova.network.neutron [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Updating instance_info_cache with network_info: [{"id": "169863f3-63da-4e6e-8dbb-514951796bf8", "address": "fa:16:3e:23:00:f0", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap169863f3-63", "ovs_interfaceid": "169863f3-63da-4e6e-8dbb-514951796bf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.042020] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.630s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.042020] env[61545]: DEBUG nova.objects.instance [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1102.268500] env[61545]: DEBUG nova.compute.manager [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1102.268724] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.269697] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33132343-7798-4f0f-9310-430b8d1e3575 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.277991] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.278347] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8ca572e-aa05-4fd0-b7ff-34b844e6f6cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.288028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.288028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.288028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.288028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.288028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.288028] env[61545]: DEBUG oslo_vmware.api [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1102.288028] env[61545]: value = "task-4256513" [ 1102.288028] env[61545]: _type = "Task" [ 1102.288028] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.288759] env[61545]: INFO nova.compute.manager [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Terminating instance [ 1102.303351] env[61545]: DEBUG oslo_vmware.api [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.471308] env[61545]: DEBUG nova.network.neutron [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [{"id": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "address": "fa:16:3e:e1:1e:c2", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce640f58-ba", "ovs_interfaceid": "ce640f58-ba75-4bd9-8c39-40145ff6ac4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.507699] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256512, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.544242] env[61545]: DEBUG nova.compute.utils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1102.549959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.551261] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Instance network_info: |[{"id": "169863f3-63da-4e6e-8dbb-514951796bf8", "address": "fa:16:3e:23:00:f0", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap169863f3-63", "ovs_interfaceid": "169863f3-63da-4e6e-8dbb-514951796bf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1102.556660] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1102.556921] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1102.561415] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Acquired lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.561635] env[61545]: DEBUG nova.network.neutron [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Refreshing network info cache for port 169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.565566] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:00:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '169863f3-63da-4e6e-8dbb-514951796bf8', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1102.577043] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1102.580130] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1102.583054] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb03d627-aea0-4845-a352-fc77a2b0ec6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.609218] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1102.609218] env[61545]: value = "task-4256514" [ 1102.609218] env[61545]: _type = "Task" [ 1102.609218] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.619847] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256514, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.707754] env[61545]: DEBUG nova.policy [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb00c18cd27541359ae0adf45f5c4171', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa2360863a5f4eff8a88eca0c88fa76d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1102.795653] env[61545]: DEBUG nova.compute.manager [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1102.795894] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.796798] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64edfe8f-ce04-46a0-a7c5-705b5cd32c5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.802620] env[61545]: DEBUG oslo_vmware.api [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256513, 'name': PowerOffVM_Task, 'duration_secs': 0.355462} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.805201] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1102.805384] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1102.805641] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1276401a-f4f0-4c16-a138-4fde6b34e741 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.809892] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.810483] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a46b9ae-9eb5-481b-8380-b3630c8adcb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.818452] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1102.818452] env[61545]: value = "task-4256516" [ 1102.818452] env[61545]: _type = "Task" [ 1102.818452] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.826857] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256516, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.893469] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1102.893857] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1102.894023] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Deleting the datastore file [datastore2] 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1102.894320] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc49ac3e-94ea-410e-8a63-1822f06db1ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.907362] env[61545]: DEBUG oslo_vmware.api [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for the task: (returnval){ [ 1102.907362] env[61545]: value = "task-4256517" [ 1102.907362] env[61545]: _type = "Task" [ 1102.907362] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.922632] env[61545]: DEBUG oslo_vmware.api [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.973925] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-97b72809-2a1e-4eda-af82-71cac2d79a64" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.007729] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256512, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.051604] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1103.079207] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82ac4761-14f0-4773-aa2f-5b16999b66e9 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.041s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.088132] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.862s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.088132] env[61545]: DEBUG nova.objects.instance [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lazy-loading 'resources' on Instance uuid db2d0e21-f6bb-4f61-8d54-e9191de13a59 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.124808] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256514, 'name': CreateVM_Task, 'duration_secs': 0.359333} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.125232] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1103.126465] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.126512] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.126849] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1103.127131] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f3ff2f1-7c98-4460-bbf9-2d1f75348eab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.133339] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1103.133339] env[61545]: value = 
"session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e8154-f0e3-2cbc-a7a2-4717e672e65d" [ 1103.133339] env[61545]: _type = "Task" [ 1103.133339] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.144767] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e8154-f0e3-2cbc-a7a2-4717e672e65d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.333797] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256516, 'name': PowerOffVM_Task, 'duration_secs': 0.199722} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.335534] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.335747] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1103.336136] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57913b0d-3c69-45ef-a233-62da1f35f93d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.405850] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1103.406155] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1103.406304] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleting the datastore file [datastore2] 97b72809-2a1e-4eda-af82-71cac2d79a64 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.406582] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a54d8377-569b-4189-a7c3-eb93919d56d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.421447] env[61545]: DEBUG oslo_vmware.api 
[None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Task: {'id': task-4256517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201434} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.422897] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.423092] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1103.423288] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1103.423827] env[61545]: INFO nova.compute.manager [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1103.423827] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.423996] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1103.423996] env[61545]: value = "task-4256519" [ 1103.423996] env[61545]: _type = "Task" [ 1103.423996] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.424139] env[61545]: DEBUG nova.compute.manager [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1103.424234] env[61545]: DEBUG nova.network.neutron [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1103.435321] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256519, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.458897] env[61545]: DEBUG nova.network.neutron [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.478602] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03c26346-5fbd-4bd7-b528-ec8ededfdaa5 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-97b72809-2a1e-4eda-af82-71cac2d79a64-281269ec-aad9-4fd1-bf3e-ba8f1bc9923f" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.722s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.506475] env[61545]: DEBUG oslo_vmware.api [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256512, 'name': PowerOnVM_Task, 'duration_secs': 1.137553} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.506794] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.506999] env[61545]: INFO nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Took 11.67 seconds to spawn the instance on the hypervisor. [ 1103.507403] env[61545]: DEBUG nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.508269] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e67b5f-767a-4f36-9283-bad44846ca7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.649768] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e8154-f0e3-2cbc-a7a2-4717e672e65d, 'name': SearchDatastore_Task, 'duration_secs': 0.013832} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.650072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.650354] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.650645] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.654023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.654023] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.654023] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8eb6fd4a-96cf-460c-a83c-5386215d28c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.662675] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.662866] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1103.666364] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d359fea6-cc4f-4926-a71b-67c1b0657244 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.674615] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1103.674615] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524e4130-231a-7580-4360-1dba7dbfc131" [ 1103.674615] env[61545]: _type = "Task" [ 1103.674615] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.683921] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524e4130-231a-7580-4360-1dba7dbfc131, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.689408] env[61545]: INFO nova.compute.manager [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Resuming [ 1103.690012] env[61545]: DEBUG nova.objects.instance [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lazy-loading 'flavor' on Instance uuid f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.799288] env[61545]: DEBUG nova.compute.manager [req-efb65216-a46e-4c61-acd3-201959e2fa44 req-28ee007a-7fe3-4e13-9dd6-db6dd7b23dc3 service nova] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Received event network-vif-deleted-989b3fc6-0843-488f-9af2-39bb487eb78a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1103.901249] env[61545]: DEBUG nova.network.neutron [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Updated VIF entry in instance network info cache for port 169863f3-63da-4e6e-8dbb-514951796bf8. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1103.901738] env[61545]: DEBUG nova.network.neutron [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Updating instance_info_cache with network_info: [{"id": "169863f3-63da-4e6e-8dbb-514951796bf8", "address": "fa:16:3e:23:00:f0", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap169863f3-63", "ovs_interfaceid": "169863f3-63da-4e6e-8dbb-514951796bf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.925744] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4702eab-6d9f-4e98-8f9c-1ae3f8b34a1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.939406] env[61545]: DEBUG oslo_vmware.api [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.447049} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.941255] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.941480] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1103.941662] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1103.941839] env[61545]: INFO nova.compute.manager [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1103.942107] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.942395] env[61545]: DEBUG nova.compute.manager [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1103.942761] env[61545]: DEBUG nova.network.neutron [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1103.945411] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b165cca8-ff4e-49a4-aa5b-9e6e9240cb1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.981783] env[61545]: INFO nova.compute.manager [-] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Took 2.40 seconds to deallocate network for instance. 
[ 1103.985143] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110b18bf-d3e2-408b-b56e-28c85a24612d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.998622] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f34a99-e61d-4b3e-8090-f272473465e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.018506] env[61545]: DEBUG nova.compute.provider_tree [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.027728] env[61545]: INFO nova.compute.manager [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Took 24.33 seconds to build instance. [ 1104.032212] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Successfully created port: 55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.071224] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1104.100658] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.101069] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.101346] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.101657] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.101912] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.102373] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.102626] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.102788] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.102957] env[61545]: DEBUG nova.virt.hardware [None 
req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.103135] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.103307] env[61545]: DEBUG nova.virt.hardware [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.104607] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06945a4-6d92-4c48-a20a-0b706f29385c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.113890] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a36800-9999-4acb-aeda-e1848b4e4706 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.195406] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524e4130-231a-7580-4360-1dba7dbfc131, 'name': SearchDatastore_Task, 'duration_secs': 0.040699} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.199684] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c83f07a4-e4a5-4100-8750-288ef653c5e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.215534] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1104.215534] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523bbd19-1357-c6c3-379c-2dc72459b644" [ 1104.215534] env[61545]: _type = "Task" [ 1104.215534] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.225761] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Successfully updated port: 6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1104.235368] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523bbd19-1357-c6c3-379c-2dc72459b644, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.404339] env[61545]: DEBUG oslo_concurrency.lockutils [req-ab5b72fc-4529-4295-933a-b8c6b3809137 req-71d160b5-48b7-4891-a6f5-8a5ab7392089 service nova] Releasing lock "refresh_cache-79762f13-2f93-43ba-883b-9437c7732c04" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.492976] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.532991] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cea6588-0461-4378-8913-1108b34132cc tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.845s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.557809] env[61545]: DEBUG nova.scheduler.client.report [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1104.558169] env[61545]: DEBUG nova.compute.provider_tree [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 125 to 126 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1104.558404] env[61545]: DEBUG nova.compute.provider_tree [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.730067] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523bbd19-1357-c6c3-379c-2dc72459b644, 'name': SearchDatastore_Task, 
'duration_secs': 0.031384} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.730400] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.730664] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 79762f13-2f93-43ba-883b-9437c7732c04/79762f13-2f93-43ba-883b-9437c7732c04.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1104.730952] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6d262eb-b4be-4771-b62f-57dcb0ea46ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.735434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.735658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.736191] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.747754] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1104.747754] env[61545]: value = "task-4256520" [ 1104.747754] env[61545]: _type = "Task" [ 1104.747754] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.762972] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.068645] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.073826] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.487s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.074283] env[61545]: DEBUG nova.objects.instance [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'resources' on Instance uuid 2f8567b1-7291-4705-8ef3-23547eb4860e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.107736] env[61545]: INFO nova.scheduler.client.report [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Deleted allocations for instance db2d0e21-f6bb-4f61-8d54-e9191de13a59 [ 1105.206570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.206570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquired lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.206570] env[61545]: DEBUG nova.network.neutron [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.266037] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256520, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.304329] env[61545]: DEBUG nova.network.neutron [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.339768] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1105.622148] env[61545]: DEBUG oslo_concurrency.lockutils [None req-809e15f0-4e5f-4d19-bfec-95e00f9b6d44 tempest-ImagesTestJSON-711834106 tempest-ImagesTestJSON-711834106-project-member] Lock "db2d0e21-f6bb-4f61-8d54-e9191de13a59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.423s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.761784] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.879472} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.762104] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 79762f13-2f93-43ba-883b-9437c7732c04/79762f13-2f93-43ba-883b-9437c7732c04.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.762348] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1105.762746] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b200ff1-2111-43c6-a506-21545ed9bb52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.783627] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1105.783627] env[61545]: value = "task-4256521" [ 1105.783627] env[61545]: _type = "Task" [ 1105.783627] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.798517] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256521, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.800170] env[61545]: DEBUG nova.network.neutron [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Updating instance_info_cache with network_info: [{"id": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "address": "fa:16:3e:7e:6f:c4", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6070a5d2-42", "ovs_interfaceid": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.807107] env[61545]: INFO nova.compute.manager [-] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Took 2.38 seconds to deallocate network for instance. [ 1105.832364] env[61545]: DEBUG nova.compute.manager [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Received event network-changed-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1105.834252] env[61545]: DEBUG nova.compute.manager [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Refreshing instance network info cache due to event network-changed-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1105.834252] env[61545]: DEBUG oslo_concurrency.lockutils [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.834252] env[61545]: DEBUG oslo_concurrency.lockutils [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.834252] env[61545]: DEBUG nova.network.neutron [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Refreshing network info cache for port b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.913228] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Received event network-vif-plugged-6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1105.913635] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Acquiring lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.914129] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.914444] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.914748] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] No waiting events found dispatching network-vif-plugged-6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1105.915062] env[61545]: WARNING nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Received unexpected event network-vif-plugged-6070a5d2-428a-463d-9f30-9d446eb6a23a for instance with vm_state building and task_state spawning. 
[ 1105.915343] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Received event network-changed-6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1105.915641] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Refreshing instance network info cache due to event network-changed-6070a5d2-428a-463d-9f30-9d446eb6a23a. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1105.915959] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Acquiring lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.964242] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1b71ab-f468-4db4-8f07-8ce4fb9c9403 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.975022] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a1fb20-2992-4fcc-bdcc-5eace3766d36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.015287] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abce435-db6b-48a2-95ac-49663918fa77 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.024888] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e5de1a-e85f-4d6c-8cff-1b121c01ac4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.043267] env[61545]: DEBUG nova.compute.provider_tree [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1106.195416] env[61545]: DEBUG nova.network.neutron [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.197481] env[61545]: DEBUG nova.network.neutron [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [{"id": "2147b830-281d-4a24-90d1-22eccefc4c5c", "address": "fa:16:3e:57:07:3e", "network": 
{"id": "f80ffc60-31e4-4130-b375-a4504a9cc51b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1551552556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0f97aab169448c5a0d956b1b33e1ac2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2147b830-28", "ovs_interfaceid": "2147b830-281d-4a24-90d1-22eccefc4c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.295784] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256521, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084153} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.297409] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1106.298342] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63ad19b-af32-4d8f-9973-c39e76793454 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.316655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.316845] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Instance network_info: |[{"id": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "address": "fa:16:3e:7e:6f:c4", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6070a5d2-42", "ovs_interfaceid": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1106.317685] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.331233] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 79762f13-2f93-43ba-883b-9437c7732c04/79762f13-2f93-43ba-883b-9437c7732c04.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.331233] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Acquired lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.331233] env[61545]: DEBUG nova.network.neutron [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Refreshing network info cache for port 6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.331233] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:6f:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6070a5d2-428a-463d-9f30-9d446eb6a23a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.338455] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.338673] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab4f48d4-e07d-4463-81db-c5f89a9309aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.358268] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1106.359383] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d926d66f-6eb0-4a33-a78f-333680117d12 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.383027] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.383027] env[61545]: value = "task-4256523" [ 1106.383027] env[61545]: _type = "Task" [ 1106.383027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.383027] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1106.383027] env[61545]: value = "task-4256522" [ 1106.383027] env[61545]: _type = "Task" [ 1106.383027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.395728] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256523, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.399605] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256522, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.584536] env[61545]: DEBUG nova.scheduler.client.report [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1106.585024] env[61545]: DEBUG nova.compute.provider_tree [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 126 to 127 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1106.585135] env[61545]: DEBUG nova.compute.provider_tree [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1106.697493] env[61545]: INFO nova.compute.manager [-] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Took 2.75 seconds to deallocate network for instance. 
[ 1106.699705] env[61545]: DEBUG oslo_concurrency.lockutils [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Releasing lock "refresh_cache-f9c9c447-e676-4143-b329-fb6d71bcd553" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.702400] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3873cf5b-93fc-4643-808a-4096b3895072 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.715026] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Resuming the VM {{(pid=61545) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1106.715026] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22b51970-8034-4776-83f4-6085f6f2356c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.719806] env[61545]: DEBUG oslo_vmware.api [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1106.719806] env[61545]: value = "task-4256524" [ 1106.719806] env[61545]: _type = "Task" [ 1106.719806] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.732723] env[61545]: DEBUG oslo_vmware.api [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256524, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.896687] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256522, 'name': ReconfigVM_Task, 'duration_secs': 0.331936} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.900122] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 79762f13-2f93-43ba-883b-9437c7732c04/79762f13-2f93-43ba-883b-9437c7732c04.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.900784] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256523, 'name': CreateVM_Task, 'duration_secs': 0.445907} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.901013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da7dfefd-3953-4362-8ba0-3e9d5f77ea2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.904718] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1106.904718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.904718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.904718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1106.904718] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9f26a65-dd76-46c9-8287-ec37f43d59c7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.911270] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1106.911270] env[61545]: value = "task-4256525" [ 1106.911270] env[61545]: _type = "Task" [ 1106.911270] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.913276] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1106.913276] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251556e-4732-ce49-26a7-8a8907e79bd1" [ 1106.913276] env[61545]: _type = "Task" [ 1106.913276] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.931134] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256525, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.939593] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5251556e-4732-ce49-26a7-8a8907e79bd1, 'name': SearchDatastore_Task, 'duration_secs': 0.013077} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.939593] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.939811] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.940564] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.940564] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.940564] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.940941] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cba4774-f7db-4cc5-a412-044d9d838e67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.956523] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.956523] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 
tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.956523] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9b49617-3493-4364-82ac-1c2a4a2c4641 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.962571] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1106.962571] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526dfb29-20b5-8ef5-0208-9b1a8130f426" [ 1106.962571] env[61545]: _type = "Task" [ 1106.962571] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.971169] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526dfb29-20b5-8ef5-0208-9b1a8130f426, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.090888] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.093598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.183s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.094018] env[61545]: DEBUG nova.objects.instance [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'resources' on Instance uuid e21de424-8121-4e2f-84c2-8096ba8048cc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.125960] env[61545]: INFO nova.scheduler.client.report [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocations for instance 2f8567b1-7291-4705-8ef3-23547eb4860e [ 1107.197015] env[61545]: DEBUG nova.network.neutron [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Updated VIF entry in instance network info cache for port 6070a5d2-428a-463d-9f30-9d446eb6a23a. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.197346] env[61545]: DEBUG nova.network.neutron [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Updating instance_info_cache with network_info: [{"id": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "address": "fa:16:3e:7e:6f:c4", "network": {"id": "29178cd3-1aa9-467b-83f0-93fb74dcfe63", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1927628515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4ccb92c3be47f18fd65a22a5a1ad94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6070a5d2-42", "ovs_interfaceid": "6070a5d2-428a-463d-9f30-9d446eb6a23a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.210324] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.230904] env[61545]: DEBUG oslo_vmware.api [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256524, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.380361] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Successfully updated port: 55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.388799] env[61545]: DEBUG nova.network.neutron [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updated VIF entry in instance network info cache for port b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.390805] env[61545]: DEBUG nova.network.neutron [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.427971] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256525, 'name': Rename_Task, 'duration_secs': 0.150032} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.427971] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1107.427971] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6db17187-7dce-43b1-b393-ad500b1271f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.437514] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1107.437514] env[61545]: value = "task-4256526" [ 1107.437514] env[61545]: _type = "Task" [ 1107.437514] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.448664] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256526, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.477261] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526dfb29-20b5-8ef5-0208-9b1a8130f426, 'name': SearchDatastore_Task, 'duration_secs': 0.011174} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.477850] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8b04d86-5396-4e4b-885f-ab3301ab3e22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.486715] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1107.486715] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c514a6-ad37-dbd8-b672-87f517b07a1a" [ 1107.486715] env[61545]: _type = "Task" [ 1107.486715] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.499390] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c514a6-ad37-dbd8-b672-87f517b07a1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.636262] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c22f8a89-722a-4522-9a4e-8770bc454e74 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "2f8567b1-7291-4705-8ef3-23547eb4860e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.128s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.700739] env[61545]: DEBUG oslo_concurrency.lockutils [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] Releasing lock "refresh_cache-a7967300-6760-4310-bf48-00ddcaac3ee8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.701115] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Received event network-vif-deleted-208c9c7b-7408-4bea-a6d1-34d97f61dabf {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1107.701343] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Received event network-vif-deleted-ce640f58-ba75-4bd9-8c39-40145ff6ac4e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1107.701536] env[61545]: INFO nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: 
97b72809-2a1e-4eda-af82-71cac2d79a64] Neutron deleted interface ce640f58-ba75-4bd9-8c39-40145ff6ac4e; detaching it from the instance and deleting it from the info cache [ 1107.701713] env[61545]: DEBUG nova.network.neutron [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.738510] env[61545]: DEBUG oslo_vmware.api [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256524, 'name': PowerOnVM_Task, 'duration_secs': 0.865617} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.738845] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Resumed the VM {{(pid=61545) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1107.739090] env[61545]: DEBUG nova.compute.manager [None req-002c1908-f490-4383-9bea-364da53640a7 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.740064] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8362f191-18ae-4ff4-b21f-b26c1c719413 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.823053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e72272-06b1-4180-8761-da50a1b9d494 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.831430] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d44f192-180f-4063-9baf-ddc33ed3238a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.864196] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9395f7-fd6c-4f9c-b33e-f8f64cec2cd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.872900] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c096f9d-ac61-4bdb-b851-41b3dee574c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.889627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.889784] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] 
Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.889948] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.891395] env[61545]: DEBUG nova.compute.provider_tree [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.894245] env[61545]: DEBUG oslo_concurrency.lockutils [req-61bc719c-d559-4ef2-8c09-4a382cd7a98e req-adf59395-c9b6-488d-acf5-3e8f4f337229 service nova] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.948755] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256526, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.998293] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c514a6-ad37-dbd8-b672-87f517b07a1a, 'name': SearchDatastore_Task, 'duration_secs': 0.0388} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.998588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.998862] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a7967300-6760-4310-bf48-00ddcaac3ee8/a7967300-6760-4310-bf48-00ddcaac3ee8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.999265] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61b667b7-4157-4413-b0fa-eead3d1a7994 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.008473] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1108.008473] env[61545]: value = "task-4256527" [ 1108.008473] env[61545]: _type = "Task" [ 1108.008473] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.019359] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256527, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.089707] env[61545]: DEBUG nova.compute.manager [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Received event network-vif-plugged-55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1108.089991] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Acquiring lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.090238] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.090431] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.090628] env[61545]: DEBUG nova.compute.manager [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] No waiting events found dispatching network-vif-plugged-55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1108.090804] env[61545]: WARNING nova.compute.manager [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Received unexpected event network-vif-plugged-55458c7f-c486-49fb-966b-0478ed8948ee for instance with vm_state building and task_state spawning. [ 1108.091055] env[61545]: DEBUG nova.compute.manager [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Received event network-changed-55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1108.091212] env[61545]: DEBUG nova.compute.manager [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Refreshing instance network info cache due to event network-changed-55458c7f-c486-49fb-966b-0478ed8948ee. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1108.091328] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.207438] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-614ed54f-b462-4345-86d0-e6d33ae54b81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.220171] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187f4555-2bf2-4dd0-aaa5-bdbac4056f46 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.258462] env[61545]: DEBUG nova.compute.manager [req-26fcd4f4-e3d2-478a-8140-8f753f541293 req-e2cd4308-36cc-4ff7-997f-b0ccefbf4d80 service nova] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Detach interface failed, port_id=ce640f58-ba75-4bd9-8c39-40145ff6ac4e, reason: Instance 97b72809-2a1e-4eda-af82-71cac2d79a64 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1108.398051] env[61545]: DEBUG nova.scheduler.client.report [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.455252] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256526, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.499488] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1108.525169] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256527, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.902600] env[61545]: DEBUG nova.network.neutron [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.905170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.911246] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.113s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.911246] env[61545]: INFO nova.compute.claims [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.945970] env[61545]: INFO nova.scheduler.client.report [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted allocations for instance e21de424-8121-4e2f-84c2-8096ba8048cc [ 1108.958228] env[61545]: DEBUG oslo_vmware.api [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256526, 'name': PowerOnVM_Task, 'duration_secs': 1.082629} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.959929] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.960152] env[61545]: INFO nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1108.960344] env[61545]: DEBUG nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.961193] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271c312b-52a9-46a6-bb4d-9017deca3346 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.026081] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256527, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.190864] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.191126] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.403874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.404181] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Instance network_info: |[{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1109.404520] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.404705] env[61545]: DEBUG nova.network.neutron [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Refreshing network info cache for port 55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.406854] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:4c:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55458c7f-c486-49fb-966b-0478ed8948ee', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.420189] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1109.423579] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1109.424520] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02c2764d-9057-4981-9f5d-8b29b50ec604 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.450111] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.450111] env[61545]: value = "task-4256528" [ 1109.450111] env[61545]: _type = "Task" [ 1109.450111] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.458129] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2b26c3f0-36a3-4e5d-8c32-a9bc4f9f1472 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "e21de424-8121-4e2f-84c2-8096ba8048cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.003s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.462943] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256528, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.486856] env[61545]: INFO nova.compute.manager [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Took 27.65 seconds to build instance. [ 1109.523920] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256527, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.704209] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.704440] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.704637] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.704821] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.704942] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.705104] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.705244] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1109.705391] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.966170] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256528, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.991646] env[61545]: DEBUG oslo_concurrency.lockutils [None req-49f0236a-1506-4ccb-9e35-dd0efef2eeb3 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.166s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.024690] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256527, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.744791} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.032015] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a7967300-6760-4310-bf48-00ddcaac3ee8/a7967300-6760-4310-bf48-00ddcaac3ee8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1110.032295] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1110.033669] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5372cd15-42f4-4132-a94a-0f7c43787385 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.044230] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1110.044230] env[61545]: value = "task-4256529" [ 1110.044230] env[61545]: _type = "Task" [ 1110.044230] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.057246] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256529, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.174848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a76508-b5d7-46e2-b14d-5406f33389c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.183593] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cca4a2-e0da-4261-9138-5314affbb2d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.218864] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.220179] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb209098-adbf-49ad-9026-506a07b20206 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.229343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5288660-73c9-4c7b-b10f-4e33b0baab07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.246873] env[61545]: DEBUG nova.compute.provider_tree [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1110.389766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.390152] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.390458] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.390678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.391172] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.393348] env[61545]: INFO nova.compute.manager [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Terminating instance [ 1110.462269] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256528, 'name': CreateVM_Task, 'duration_secs': 0.957965} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.462414] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1110.463137] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.463306] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.463638] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1110.464216] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cd2714c-42c6-4756-892a-4a781951c538 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.471699] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1110.471699] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c0083d-4c81-b653-c9da-a4ddb406f3a3" [ 1110.471699] env[61545]: _type = "Task" [ 1110.471699] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.482638] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c0083d-4c81-b653-c9da-a4ddb406f3a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.518177] env[61545]: DEBUG nova.network.neutron [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updated VIF entry in instance network info cache for port 55458c7f-c486-49fb-966b-0478ed8948ee. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.518662] env[61545]: DEBUG nova.network.neutron [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.555260] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122592} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.555558] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.556348] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a862a3f-a4a7-4a56-ba73-899e014351b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.581293] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] a7967300-6760-4310-bf48-00ddcaac3ee8/a7967300-6760-4310-bf48-00ddcaac3ee8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.581664] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-988a06bf-9373-4790-9a55-f0b113a24893 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.602518] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1110.602518] env[61545]: value = "task-4256530" [ 1110.602518] env[61545]: _type = "Task" [ 1110.602518] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.614437] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.801984] env[61545]: DEBUG nova.scheduler.client.report [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 127 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1110.802321] env[61545]: DEBUG nova.compute.provider_tree [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 127 to 128 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1110.802543] env[61545]: DEBUG nova.compute.provider_tree [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1110.898126] env[61545]: DEBUG nova.compute.manager [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1110.898384] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1110.899424] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef719a58-61d4-4949-befb-f15126484529 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.909990] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1110.910636] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da391e50-786a-4759-bca0-b90df333262c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.920748] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1110.920748] env[61545]: value = "task-4256531" [ 1110.920748] env[61545]: _type = "Task" [ 1110.920748] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.929909] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "79762f13-2f93-43ba-883b-9437c7732c04" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.930254] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.930632] env[61545]: DEBUG nova.compute.manager [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.930982] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.932329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a245c2-6288-4dba-a5d9-035b26b2e783 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.939653] env[61545]: DEBUG nova.compute.manager [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1110.940388] env[61545]: DEBUG nova.objects.instance [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'flavor' on Instance uuid 79762f13-2f93-43ba-883b-9437c7732c04 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.993815] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c0083d-4c81-b653-c9da-a4ddb406f3a3, 'name': SearchDatastore_Task, 'duration_secs': 0.022991} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.994221] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.994681] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.995309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.995309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.995506] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1110.995904] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10329ad1-9fd1-4c35-b82f-e69b29f26a0d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.024024] env[61545]: DEBUG oslo_concurrency.lockutils [req-6f68bc6c-f573-4edf-acb7-aa81ef1262fd req-26d99f0f-1373-4c07-9ecc-6af9a2f80204 service nova] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.024024] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.024024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.024024] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fbbb9e4-dd6b-4356-8e00-547d75cd08a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.031441] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1111.031441] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52945530-c5fa-a79e-06e2-81e0eb136311" [ 1111.031441] env[61545]: _type = "Task" [ 1111.031441] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.043971] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52945530-c5fa-a79e-06e2-81e0eb136311, 'name': SearchDatastore_Task, 'duration_secs': 0.011145} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.045312] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89e28253-5153-400d-9a91-43031bb66f25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.052084] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1111.052084] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282433e-d813-8787-4cc8-863da2d802b8" [ 1111.052084] env[61545]: _type = "Task" [ 1111.052084] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.062154] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282433e-d813-8787-4cc8-863da2d802b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.115823] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256530, 'name': ReconfigVM_Task, 'duration_secs': 0.424161} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.116719] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Reconfigured VM instance instance-0000005a to attach disk [datastore2] a7967300-6760-4310-bf48-00ddcaac3ee8/a7967300-6760-4310-bf48-00ddcaac3ee8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.117464] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aefac99e-eb2a-4305-9149-31b0e232b722 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.125814] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1111.125814] env[61545]: value = "task-4256532" [ 1111.125814] env[61545]: _type = "Task" [ 1111.125814] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.140551] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256532, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.308722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.309396] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1111.313529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.320s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.318025] env[61545]: DEBUG nova.objects.instance [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lazy-loading 'resources' on Instance uuid 91eeceeb-c11e-414b-8ae6-e68e927f1f1e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.431505] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256531, 'name': PowerOffVM_Task, 'duration_secs': 0.218567} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.431814] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.431992] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1111.432282] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff2b9626-d189-4d19-b905-1fcdc5424e5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.529298] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1111.529548] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1111.529754] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleting the datastore file [datastore2] 16bc91d0-71c3-4bd9-980b-6574c3fd9335 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.534416] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-db4dd2d9-aa5d-4763-8412-692cfad34b69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.542842] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for the task: (returnval){ [ 1111.542842] env[61545]: value = "task-4256534" [ 1111.542842] env[61545]: _type = "Task" [ 1111.542842] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.554309] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.564522] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282433e-d813-8787-4cc8-863da2d802b8, 'name': SearchDatastore_Task, 'duration_secs': 0.011741} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.564809] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.565099] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1111.565418] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a69e249-828d-41d0-8b42-346b064aaa79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.575542] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1111.575542] env[61545]: value = "task-4256535" [ 1111.575542] env[61545]: _type = "Task" [ 1111.575542] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.592460] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256535, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.636638] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256532, 'name': Rename_Task, 'duration_secs': 0.178012} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.637051] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.637330] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45cc387b-fa53-4362-8237-184adfdcbfd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.648059] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1111.648059] env[61545]: value = "task-4256536" [ 1111.648059] env[61545]: _type = "Task" [ 1111.648059] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.657383] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256536, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.820185] env[61545]: DEBUG nova.compute.utils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.822588] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.825031] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.889996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.890472] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.950547] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.950874] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60f5d69d-0e1f-49d6-9b5a-7c5c5ee39471 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.955801] env[61545]: DEBUG nova.policy [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9bcc01a701c4b728d810b0b27ce6249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeb51ace7650413b987be7ddd7490182', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.963753] env[61545]: DEBUG oslo_vmware.api [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1111.963753] env[61545]: value = "task-4256537" [ 1111.963753] env[61545]: _type = "Task" [ 1111.963753] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.979319] env[61545]: DEBUG oslo_vmware.api [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256537, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.058766] env[61545]: DEBUG oslo_vmware.api [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Task: {'id': task-4256534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263956} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.063154] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1112.063438] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1112.063670] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1112.063896] env[61545]: INFO nova.compute.manager [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1112.064272] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.064838] env[61545]: DEBUG nova.compute.manager [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1112.064982] env[61545]: DEBUG nova.network.neutron [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1112.094479] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256535, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.119983] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83e7e7c-3c3d-489a-a446-373ab4926df0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.131057] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249cb236-8285-41b3-bf27-100bb4c98874 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.179319] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552d62eb-9ce4-488e-bab1-3e81f3f7decd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.193942] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3d756b-beed-460d-a2e0-f8839e352983 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.205263] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256536, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.218619] env[61545]: DEBUG nova.compute.provider_tree [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.331725] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.393321] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1112.473592] env[61545]: DEBUG oslo_vmware.api [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256537, 'name': PowerOffVM_Task, 'duration_secs': 0.285123} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.473882] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.474199] env[61545]: DEBUG nova.compute.manager [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.474913] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a538a510-40ba-47db-ab77-60da8ba3e667 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.592384] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582024} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.592835] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1112.592914] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1112.593220] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9fcd9cc-a875-4931-8207-259ba760d9f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.617249] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1112.617249] env[61545]: value = "task-4256538" [ 1112.617249] env[61545]: _type = "Task" [ 1112.617249] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.630826] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256538, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.685642] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256536, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.724913] env[61545]: DEBUG nova.scheduler.client.report [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.922046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.987746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b61b6ce5-0df8-4c16-b70e-7539ca16c1e2 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.103697] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Successfully created port: f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.126517] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08459} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.126844] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.128144] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a121c0a6-965b-489f-a00b-c1f820bbb28d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.164514] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.164877] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6e5ddc9-56d0-4d71-9f2d-f619a7c9807d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.185931] env[61545]: DEBUG nova.compute.manager [req-33f04ab7-9d52-41b6-a2e7-ae9de5938ba6 req-9156f37b-ec92-4cf7-9869-44edacadd278 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Received event network-vif-deleted-ab535fb5-b111-46f9-8c40-e9647f50901b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1113.186081] env[61545]: INFO nova.compute.manager [req-33f04ab7-9d52-41b6-a2e7-ae9de5938ba6 req-9156f37b-ec92-4cf7-9869-44edacadd278 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Neutron deleted interface ab535fb5-b111-46f9-8c40-e9647f50901b; detaching it from the instance and deleting it from the info cache [ 1113.186256] env[61545]: DEBUG nova.network.neutron [req-33f04ab7-9d52-41b6-a2e7-ae9de5938ba6 req-9156f37b-ec92-4cf7-9869-44edacadd278 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.199940] env[61545]: DEBUG oslo_vmware.api [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256536, 'name': PowerOnVM_Task, 'duration_secs': 1.047206} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.201987] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.202227] env[61545]: INFO nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Took 11.66 seconds to spawn the instance on the hypervisor. [ 1113.202407] env[61545]: DEBUG nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.202742] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1113.202742] env[61545]: value = "task-4256539" [ 1113.202742] env[61545]: _type = "Task" [ 1113.202742] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.203470] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7339e659-37ac-4003-8fae-92646e2d9c1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.220627] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256539, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.233863] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.236469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.743s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.236698] env[61545]: DEBUG nova.objects.instance [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'resources' on Instance uuid c2bb4ea0-e9fb-4198-80fa-acfd25fb226d {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.263106] env[61545]: INFO nova.scheduler.client.report [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Deleted allocations for instance 91eeceeb-c11e-414b-8ae6-e68e927f1f1e [ 1113.343617] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.387510] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.387756] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.387889] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.388096] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.388240] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.388449] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.388678] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.388932] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.389069] env[61545]: DEBUG 
nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.389250] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.389426] env[61545]: DEBUG nova.virt.hardware [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.390654] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da990be8-72cc-4dfc-b5cb-8927f132dedb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.400837] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b743aa07-705f-4b00-acf0-0e16c65bcbbb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.529229] env[61545]: DEBUG nova.network.neutron [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.696027] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8da0e3e8-5857-4fe2-80be-c2a3c0df2a0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.708419] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d498fb5-0216-4cb5-a3ef-c5c6dc8e3ee3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.739812] env[61545]: INFO nova.compute.manager [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Took 28.80 seconds to build instance. [ 1113.748273] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256539, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.764303] env[61545]: DEBUG nova.compute.manager [req-33f04ab7-9d52-41b6-a2e7-ae9de5938ba6 req-9156f37b-ec92-4cf7-9869-44edacadd278 service nova] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Detach interface failed, port_id=ab535fb5-b111-46f9-8c40-e9647f50901b, reason: Instance 16bc91d0-71c3-4bd9-980b-6574c3fd9335 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1113.776603] env[61545]: DEBUG oslo_concurrency.lockutils [None req-06374aa5-19f7-433c-9c13-0aca062406f4 tempest-ServersTestJSON-1123416272 tempest-ServersTestJSON-1123416272-project-member] Lock "91eeceeb-c11e-414b-8ae6-e68e927f1f1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.610s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.011728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "79762f13-2f93-43ba-883b-9437c7732c04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.011728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.011728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "79762f13-2f93-43ba-883b-9437c7732c04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.011728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.011728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.016043] env[61545]: INFO nova.compute.manager [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Terminating instance [ 1114.034036] env[61545]: INFO nova.compute.manager [-] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Took 1.97 seconds to deallocate network for instance. 
[ 1114.076384] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4b39b7-d0f5-40b8-b3f0-20b6e8cc7824 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.084969] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48295d6-c1ff-4421-aa5e-d20774e53e06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.126903] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac698b79-7ac8-4ed1-bd91-25aa27a5ba5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.133882] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d53d2b-3485-4862-b7b9-ab2e4cb89dd5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.157882] env[61545]: DEBUG nova.compute.provider_tree [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.236022] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256539, 'name': ReconfigVM_Task, 'duration_secs': 0.718067} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.236022] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.236022] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d355df0d-b000-461e-a144-f52cdf125414 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.244021] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1114.244021] env[61545]: value = "task-4256540" [ 1114.244021] env[61545]: _type = "Task" [ 1114.244021] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.253148] env[61545]: DEBUG oslo_concurrency.lockutils [None req-38cc0d66-d625-4e2d-9495-573520d2d10a tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.325s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.253148] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256540, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.519242] env[61545]: DEBUG nova.compute.manager [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.519910] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.521364] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f507743-d1d1-42d4-9b2d-e11035eab9d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.531744] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.532363] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc220411-0c03-48ad-b8ea-599239f15e22 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.543130] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.616184] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.616531] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] 
[instance: 79762f13-2f93-43ba-883b-9437c7732c04] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.616726] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] 79762f13-2f93-43ba-883b-9437c7732c04 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.617093] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c9c5203-cdbe-497c-9772-d1b981a8a506 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.625727] env[61545]: DEBUG oslo_vmware.api [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1114.625727] env[61545]: value = "task-4256542" [ 1114.625727] env[61545]: _type = "Task" [ 1114.625727] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.638960] env[61545]: DEBUG oslo_vmware.api [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.651067] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "a7967300-6760-4310-bf48-00ddcaac3ee8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.651067] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.651067] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.651067] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.651067] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.654294] env[61545]: INFO nova.compute.manager [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Terminating instance [ 1114.662667] env[61545]: DEBUG nova.scheduler.client.report [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.754741] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256540, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.139126] env[61545]: DEBUG oslo_vmware.api [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286166} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.139126] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.139576] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.139576] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.139576] env[61545]: INFO nova.compute.manager [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1115.139777] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.140121] env[61545]: DEBUG nova.compute.manager [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.140121] env[61545]: DEBUG nova.network.neutron [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.161893] env[61545]: DEBUG nova.compute.manager [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1115.161893] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.162998] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05250ae6-b5af-4cbc-a05f-b658a1fc63f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.172467] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.936s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.178696] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.178696] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.859s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.178696] env[61545]: DEBUG nova.objects.instance [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lazy-loading 'resources' on Instance uuid 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.178696] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba4eaffc-c0e1-4904-a52e-2c11d4e90f51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.191022] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1115.191022] env[61545]: value = "task-4256543" [ 1115.191022] env[61545]: _type = "Task" [ 1115.191022] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.209792] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.220414] env[61545]: INFO nova.scheduler.client.report [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleted allocations for instance c2bb4ea0-e9fb-4198-80fa-acfd25fb226d [ 1115.262086] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256540, 'name': Rename_Task, 'duration_secs': 0.904655} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.262086] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.262222] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3139b24-9025-4c15-b431-9675e0380bf3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.270065] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1115.270065] env[61545]: value = "task-4256544" [ 1115.270065] env[61545]: _type = "Task" [ 1115.270065] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.280038] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256544, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.513685] env[61545]: DEBUG nova.compute.manager [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Received event network-vif-plugged-f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1115.513906] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.514748] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.518215] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.518215] env[61545]: DEBUG nova.compute.manager [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] No waiting events found dispatching network-vif-plugged-f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.518215] env[61545]: WARNING nova.compute.manager [req-a9ecc6dc-e480-46bb-9e60-b8a4bbac6ff9 req-1f835dd0-c519-4ec5-91f9-382ef538e897 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Received unexpected event network-vif-plugged-f18fe5c7-64c8-4f58-b7c8-806d3e03985e for instance with vm_state building and task_state spawning. [ 1115.703597] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256543, 'name': PowerOffVM_Task, 'duration_secs': 0.278493} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.703962] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.704440] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.704749] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e78ec49-b9e9-48a4-bfed-cf6b7f4399dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.732384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98215a85-5321-4375-8989-49bdca58bb16 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "c2bb4ea0-e9fb-4198-80fa-acfd25fb226d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.873s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.780274] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.780274] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.780274] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleting the datastore file [datastore2] a7967300-6760-4310-bf48-00ddcaac3ee8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.780695] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3d16924-0bce-4818-9aeb-46a80af29376 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.789408] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Successfully updated port: f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.791259] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256544, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.806086] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for the task: (returnval){ [ 1115.806086] env[61545]: value = "task-4256546" [ 1115.806086] env[61545]: _type = "Task" [ 1115.806086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.817973] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.004850] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "ab1779b4-707e-4bd8-adea-940805654e1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.004850] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.008093] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f6cdff-dfb4-4f21-b56d-d9978b7defc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.019135] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fe1c84-397d-402a-b6d7-fc721b2d7102 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.059633] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d95287a-211b-4236-8eab-8f32051fec45 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.069224] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9e7c0f-5b98-4a20-ab73-4f3ad643fa57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.085078] env[61545]: DEBUG nova.compute.provider_tree [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.285094] env[61545]: DEBUG oslo_vmware.api [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256544, 'name': PowerOnVM_Task, 'duration_secs': 0.682714} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.285393] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.285623] env[61545]: INFO nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Took 12.21 seconds to spawn the instance on the hypervisor. [ 1116.285765] env[61545]: DEBUG nova.compute.manager [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.286615] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24959d71-7ac7-4d2e-95f1-fe5942f106cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.296190] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.296348] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.296493] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.318284] env[61545]: DEBUG oslo_vmware.api [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Task: {'id': task-4256546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26625} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.319822] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.319822] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.319822] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.319977] env[61545]: INFO nova.compute.manager [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1116.320389] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.320722] env[61545]: DEBUG nova.compute.manager [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1116.320834] env[61545]: DEBUG nova.network.neutron [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1116.376103] env[61545]: DEBUG nova.network.neutron [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.512070] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.590262] env[61545]: DEBUG nova.scheduler.client.report [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.662341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.662627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.662815] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.663013] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.663182] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.665459] env[61545]: INFO nova.compute.manager [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Terminating instance [ 1116.820470] env[61545]: INFO nova.compute.manager [None 
req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Took 30.97 seconds to build instance. [ 1116.862233] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.878629] env[61545]: INFO nova.compute.manager [-] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Took 1.74 seconds to deallocate network for instance. [ 1117.044553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.096778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.099260] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.889s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.099530] env[61545]: DEBUG nova.objects.instance [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'resources' on Instance uuid 97b72809-2a1e-4eda-af82-71cac2d79a64 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.121506] env[61545]: INFO nova.scheduler.client.report [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Deleted allocations for instance 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a [ 1117.169285] env[61545]: DEBUG nova.compute.manager [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1117.169541] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1117.170859] env[61545]: DEBUG nova.network.neutron [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating instance_info_cache with network_info: [{"id": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "address": "fa:16:3e:55:97:c9", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18fe5c7-64", "ovs_interfaceid": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.172830] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcfcda6-2d6d-4ca4-8075-592e9ad1ebd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.181797] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.184100] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62e031cc-e4f8-4d35-a829-ae87adbb9c6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.192676] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1117.192676] env[61545]: value = "task-4256547" [ 1117.192676] env[61545]: _type = "Task" [ 1117.192676] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.205481] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.263212] env[61545]: DEBUG nova.network.neutron [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.324784] env[61545]: DEBUG oslo_concurrency.lockutils [None req-19a12e62-0bd3-4bfc-b1a0-4b0cface0f15 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.490s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.386488] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.632479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-160f7e2c-b658-4a91-84a1-1f5ceacc74ab tempest-InstanceActionsNegativeTestJSON-1303272001 tempest-InstanceActionsNegativeTestJSON-1303272001-project-member] Lock "6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.872s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.678178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.679202] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Instance network_info: |[{"id": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "address": "fa:16:3e:55:97:c9", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18fe5c7-64", "ovs_interfaceid": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.679502] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:97:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f18fe5c7-64c8-4f58-b7c8-806d3e03985e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.694017] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.694017] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.694017] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcac2ee3-d4f3-4f64-8f22-115877c06a99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.722478] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256547, 'name': PowerOffVM_Task, 'duration_secs': 0.251199} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.724314] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.724533] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1117.724819] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.724819] env[61545]: value = "task-4256548" [ 1117.724819] env[61545]: _type = "Task" [ 1117.724819] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.725888] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ef56a1e-699b-4475-9b58-dbc97188f3ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.743349] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256548, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.766346] env[61545]: INFO nova.compute.manager [-] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Took 1.45 seconds to deallocate network for instance. [ 1117.808311] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1117.810891] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1117.810891] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleting the datastore file [datastore2] f9c9c447-e676-4143-b329-fb6d71bcd553 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.810891] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-857b4d88-9f83-44c3-b893-9f196b223282 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.817599] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for the task: (returnval){ [ 1117.817599] env[61545]: value = "task-4256550" [ 1117.817599] env[61545]: _type = "Task" [ 1117.817599] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.833622] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.884561] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9add74d6-d138-4a60-b951-5cf01efc76b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.897491] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ce542b-f685-486b-a6a2-7600bd77269e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.942455] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b767d9a7-7381-4102-b007-29740eaac1e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.952593] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fecc46-5a2e-4011-b576-a15d4314b483 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.967736] env[61545]: DEBUG nova.compute.provider_tree [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.983021] env[61545]: DEBUG nova.compute.manager [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Received event network-changed-f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1117.983147] env[61545]: DEBUG nova.compute.manager [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Refreshing instance network info cache due to event network-changed-f18fe5c7-64c8-4f58-b7c8-806d3e03985e. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1117.983374] env[61545]: DEBUG oslo_concurrency.lockutils [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] Acquiring lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.983840] env[61545]: DEBUG oslo_concurrency.lockutils [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] Acquired lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.983840] env[61545]: DEBUG nova.network.neutron [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Refreshing network info cache for port f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.244030] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256548, 'name': CreateVM_Task, 'duration_secs': 0.344229} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.244030] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.244715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.244881] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.245248] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.245535] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a43d967-2a10-43a7-b70c-5cbb2127434d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.249786] env[61545]: DEBUG nova.compute.manager [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Received event network-changed-55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1118.250118] env[61545]: DEBUG nova.compute.manager [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Refreshing instance network info cache due to event network-changed-55458c7f-c486-49fb-966b-0478ed8948ee. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1118.251106] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.251267] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.251446] env[61545]: DEBUG nova.network.neutron [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Refreshing network info cache for port 55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.257062] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1118.257062] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a4369a-dd14-47e2-f344-794280762199" [ 1118.257062] env[61545]: _type = "Task" [ 1118.257062] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.271427] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a4369a-dd14-47e2-f344-794280762199, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.280908] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.333766] env[61545]: DEBUG oslo_vmware.api [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Task: {'id': task-4256550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24151} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.334485] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.334791] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.335394] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.335394] env[61545]: INFO nova.compute.manager [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1118.335732] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.336305] env[61545]: DEBUG nova.compute.manager [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1118.336693] env[61545]: DEBUG nova.network.neutron [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1118.472099] env[61545]: DEBUG nova.scheduler.client.report [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.715880] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.716570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.769315] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a4369a-dd14-47e2-f344-794280762199, 'name': SearchDatastore_Task, 'duration_secs': 0.027468} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.769636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.769966] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.771074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.771074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.771074] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.771074] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9b803ca-d468-4174-aef3-3f2bb543dd59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.782458] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.782762] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.784031] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-630e347b-8e16-4333-a4c7-21c40e9eba41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.790763] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1118.790763] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695e84-99d1-fcb8-9440-e6ee32deb092" [ 1118.790763] env[61545]: _type = "Task" [ 1118.790763] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.802745] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695e84-99d1-fcb8-9440-e6ee32deb092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.979125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.880s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.982114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.763s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.982114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.982257] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1118.984394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.062s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.989664] env[61545]: INFO nova.compute.claims [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.995517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5990f84e-9c19-441c-81ae-40e7809955aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.013069] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2567db4-b9ea-4133-a5c8-7f7d89016783 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.030986] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ef7609-703a-4880-99b6-0993d5b986b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.035088] env[61545]: INFO nova.scheduler.client.report [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted allocations for instance 97b72809-2a1e-4eda-af82-71cac2d79a64 [ 1119.053464] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df1a824-c9b0-4918-a332-a452eff6defd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.091967] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179343MB free_disk=246GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1119.092289] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.219410] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1119.308942] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52695e84-99d1-fcb8-9440-e6ee32deb092, 'name': SearchDatastore_Task, 'duration_secs': 0.011844} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.309087] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24037f4b-5e8d-416c-b81a-c55bbe2dc72e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.315867] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1119.315867] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5287b744-d803-4703-6b1a-da58c6ef2113" [ 1119.315867] env[61545]: _type = "Task" [ 1119.315867] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.325167] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5287b744-d803-4703-6b1a-da58c6ef2113, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.395041] env[61545]: DEBUG nova.network.neutron [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updated VIF entry in instance network info cache for port f18fe5c7-64c8-4f58-b7c8-806d3e03985e. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.395426] env[61545]: DEBUG nova.network.neutron [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating instance_info_cache with network_info: [{"id": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "address": "fa:16:3e:55:97:c9", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18fe5c7-64", "ovs_interfaceid": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.558386] env[61545]: DEBUG oslo_concurrency.lockutils [None req-daf527d0-852e-44cf-be10-61832fc4e854 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "97b72809-2a1e-4eda-af82-71cac2d79a64" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.269s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.598482] env[61545]: DEBUG nova.network.neutron [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updated VIF entry in instance network info cache for port 55458c7f-c486-49fb-966b-0478ed8948ee. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.598886] env[61545]: DEBUG nova.network.neutron [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.746911] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.779852] env[61545]: DEBUG nova.network.neutron [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.829324] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5287b744-d803-4703-6b1a-da58c6ef2113, 'name': SearchDatastore_Task, 'duration_secs': 0.020707} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.829649] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.829988] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c61ca5f4-78ae-4626-977d-8c17dc12c012/c61ca5f4-78ae-4626-977d-8c17dc12c012.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.830258] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-290bd8d4-6674-4b50-b810-4b041bd7add6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.837775] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1119.837775] env[61545]: value = "task-4256551" [ 1119.837775] env[61545]: _type = "Task" [ 1119.837775] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.857754] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256551, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.898986] env[61545]: DEBUG oslo_concurrency.lockutils [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] Releasing lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.898986] env[61545]: DEBUG nova.compute.manager [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Received event network-vif-deleted-169863f3-63da-4e6e-8dbb-514951796bf8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1119.898986] env[61545]: DEBUG nova.compute.manager [req-ceb3ec81-8b2c-4fed-8338-7db85ef83c8e req-f2ddf99c-64d7-450a-b075-1f85acc337b5 service nova] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Received event network-vif-deleted-6070a5d2-428a-463d-9f30-9d446eb6a23a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1120.030480] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.030945] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.038306] env[61545]: DEBUG nova.compute.manager [req-0e803690-97b4-4b9a-858c-48ae084e0a5e req-e044875f-6988-4139-a73b-7fd23616b856 service nova] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Received event network-vif-deleted-2147b830-281d-4a24-90d1-22eccefc4c5c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1120.101820] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a7f1965-8cb4-4ff0-be35-70ef483d3c2e req-cb9babfd-ef0b-428f-abc3-fee87cb7a437 service nova] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.264836] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca0385d-a566-4b75-8285-8e4bc3642400 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.279753] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb1f73f-1265-48bf-b20c-987d5383c672 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.285845] env[61545]: INFO nova.compute.manager [-] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Took 1.95 seconds to deallocate network for instance. 
[ 1120.321994] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c099f43-8023-4cee-9592-14de4cadfd77 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.332142] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9ce21-b3c4-4c1c-9e9a-75fa842ca0f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.354806] env[61545]: DEBUG nova.compute.provider_tree [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.365691] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256551, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.520031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.520588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.538931] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1120.822562] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.854672] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256551, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.90819} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.855237] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c61ca5f4-78ae-4626-977d-8c17dc12c012/c61ca5f4-78ae-4626-977d-8c17dc12c012.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.857973] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.857973] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-115a4681-03ac-40b2-b160-0721ab5fdff5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.860059] env[61545]: DEBUG nova.scheduler.client.report [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.867136] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1120.867136] env[61545]: value = "task-4256552" [ 1120.867136] env[61545]: _type = "Task" [ 1120.867136] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.881966] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256552, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.028392] env[61545]: INFO nova.compute.manager [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Detaching volume 9009d07b-81b0-4ba0-ae46-44590740ed11 [ 1121.075627] env[61545]: INFO nova.virt.block_device [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Attempting to driver detach volume 9009d07b-81b0-4ba0-ae46-44590740ed11 from mountpoint /dev/sdb [ 1121.075990] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1121.076418] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838777', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'name': 'volume-9009d07b-81b0-4ba0-ae46-44590740ed11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42', 'attached_at': '', 'detached_at': '', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'serial': '9009d07b-81b0-4ba0-ae46-44590740ed11'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1121.078665] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd15e486-218f-4946-9aef-d5b3587ca8b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.101712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.102631] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a9d62c-9285-4e17-a736-f14fc1e337c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.110858] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ab398a-7b59-4e08-a215-f708fc9c41fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.135511] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf508ec9-0109-47fc-9bde-1d98f5b32c24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1121.153028] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] The volume has not been displaced from its original location: [datastore1] volume-9009d07b-81b0-4ba0-ae46-44590740ed11/volume-9009d07b-81b0-4ba0-ae46-44590740ed11.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1121.158147] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1121.158569] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd8a2e13-380f-4eab-a2e0-0f35a2122144 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.178005] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1121.178005] env[61545]: value = "task-4256553" [ 1121.178005] env[61545]: _type = "Task" [ 1121.178005] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.188272] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256553, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.371248] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.371892] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1121.378475] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.835s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.378738] env[61545]: DEBUG nova.objects.instance [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lazy-loading 'resources' on Instance uuid 16bc91d0-71c3-4bd9-980b-6574c3fd9335 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.380027] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089989} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.383911] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.385183] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a3e79f-ba26-45a9-86f5-b800a3811afc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.420326] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] c61ca5f4-78ae-4626-977d-8c17dc12c012/c61ca5f4-78ae-4626-977d-8c17dc12c012.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.422484] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6058e8d6-5644-41b7-a1bc-6c18bcc950d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.451565] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1121.451565] env[61545]: value = "task-4256554" [ 1121.451565] env[61545]: _type = "Task" [ 1121.451565] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.463511] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256554, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.694888] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256553, 'name': ReconfigVM_Task, 'duration_secs': 0.25069} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.696623] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1121.704896] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9394754-b89b-4238-9667-6b57d33a4c36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.723584] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1121.723584] env[61545]: value = "task-4256555" [ 1121.723584] env[61545]: _type = "Task" [ 1121.723584] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.738509] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256555, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.888125] env[61545]: DEBUG nova.compute.utils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1121.888125] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1121.888125] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1121.970878] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256554, 'name': ReconfigVM_Task, 'duration_secs': 0.288497} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.971380] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfigured VM instance instance-0000005c to attach disk [datastore2] c61ca5f4-78ae-4626-977d-8c17dc12c012/c61ca5f4-78ae-4626-977d-8c17dc12c012.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.975381] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8112431d-1838-49d3-983c-e087bab23cba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.984211] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1121.984211] env[61545]: value = "task-4256556" [ 1121.984211] env[61545]: _type = "Task" [ 1121.984211] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.996375] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256556, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.130549] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f16916-7717-443d-9613-fdc56c0cc92a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.141834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48aaee0-ce8d-4904-815a-71a3c9c19774 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.177454] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10e3e18-8539-47da-854a-866ef2f68c85 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.186331] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05c7fdd-c763-475d-8cdf-068fbf9d844c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.202692] env[61545]: DEBUG nova.compute.provider_tree [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.237247] env[61545]: DEBUG oslo_vmware.api [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256555, 'name': ReconfigVM_Task, 'duration_secs': 0.153721} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.237247] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838777', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'name': 'volume-9009d07b-81b0-4ba0-ae46-44590740ed11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42', 'attached_at': '', 'detached_at': '', 'volume_id': '9009d07b-81b0-4ba0-ae46-44590740ed11', 'serial': '9009d07b-81b0-4ba0-ae46-44590740ed11'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1122.240993] env[61545]: DEBUG nova.policy [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82de8ada56cd46319fe4c7ecd4957abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da39b1ee6df640b89a9dab58e3380397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1122.299585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "4f713be2-4c38-413b-874d-a39a4c01a1be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.299871] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.392033] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1122.496377] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256556, 'name': Rename_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.707269] env[61545]: DEBUG nova.scheduler.client.report [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.735103] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Successfully created port: a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1122.795175] env[61545]: DEBUG nova.objects.instance [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'flavor' on Instance uuid 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.805191] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.996713] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256556, 'name': Rename_Task, 'duration_secs': 0.986831} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.997031] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.997291] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86d155c0-01f8-42ec-9678-a18e9cdab0e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.005096] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1123.005096] env[61545]: value = "task-4256557" [ 1123.005096] env[61545]: _type = "Task" [ 1123.005096] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.015086] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.131255] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.131546] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.213576] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.218050] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.174s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.219698] env[61545]: INFO nova.compute.claims [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.251461] env[61545]: INFO nova.scheduler.client.report [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Deleted allocations for instance 16bc91d0-71c3-4bd9-980b-6574c3fd9335 [ 1123.326707] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.410459] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 
c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1123.446147] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1123.446409] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.446565] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1123.446742] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.446887] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1123.447059] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1123.447275] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1123.447435] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1123.447605] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1123.447769] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1123.447939] env[61545]: DEBUG nova.virt.hardware [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1123.449220] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1f318b-739c-406a-993a-e963706239de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.458142] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e211795f-76be-4344-a981-31d8cdb1cb02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.515428] env[61545]: DEBUG oslo_vmware.api [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256557, 'name': PowerOnVM_Task, 'duration_secs': 0.469474} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.515693] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1123.515894] env[61545]: INFO nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Took 10.17 seconds to spawn the instance on the hypervisor. 
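Editor's note: the topology walk logged just above (flavor limits 0:0:0, image limits 0:0:0, one possible topology for 1 vCPU) can be pictured with a short sketch. The helper below is hypothetical and only mirrors the shape of the search — enumerate (sockets, cores, threads) triples within the limits and keep the ones that cover the vCPU count — it is not Nova's actual _get_possible_cpu_topologies, which applies additional preference and sorting rules.

# Simplified, hypothetical illustration of the topology enumeration logged above.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Return every (sockets, cores, threads) triple whose product equals vcpus."""
    found = []
    for sockets in range(1, max_sockets + 1):
        for cores in range(1, max_cores + 1):
            for threads in range(1, max_threads + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the m1.nano flavor above (1 vCPU) the only candidate is 1:1:1,
# matching the "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
print(possible_topologies(1, 4, 4, 2))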
[ 1123.516080] env[61545]: DEBUG nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1123.516894] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a582ef4-19ec-40ee-90b9-44db31c3b347 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.633665] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1123.763995] env[61545]: DEBUG oslo_concurrency.lockutils [None req-04348b26-b8b0-469a-8c35-dfbb38b75427 tempest-ServerRescueNegativeTestJSON-252212181 tempest-ServerRescueNegativeTestJSON-252212181-project-member] Lock "16bc91d0-71c3-4bd9-980b-6574c3fd9335" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.373s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.802265] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ad4d8418-5174-4ed2-af26-a1bc35d302ff tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.281s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.038174] env[61545]: INFO nova.compute.manager [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Took 25.27 seconds to build instance. 
[ 1124.160770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.199684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.199684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.199684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.199684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.199684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.202064] env[61545]: INFO nova.compute.manager [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Terminating instance [ 1124.472949] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eeb7333-c7cc-457b-94d0-eb7181ea1178 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.483583] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d149ef9-00bc-40a7-a885-d98022fac2ff {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.522019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06757f7-46f1-482c-ab83-febe38f1207f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.529271] env[61545]: DEBUG nova.compute.manager [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Received event network-vif-plugged-a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1124.529551] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.529698] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.529865] env[61545]: DEBUG oslo_concurrency.lockutils [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.530017] env[61545]: DEBUG nova.compute.manager [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] No waiting events found dispatching network-vif-plugged-a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1124.530272] env[61545]: WARNING nova.compute.manager [req-d0b7fae9-1808-4b62-ae65-2dd91029b9a6 req-361bb833-63d7-447b-867f-5c5328d9e00f service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Received unexpected event network-vif-plugged-a89c03e7-6504-4eca-9dc3-110100bbf69c for instance with vm_state building and task_state spawning. 
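Editor's note: the network-vif-plugged handling above (take the "<uuid>-events" lock, pop the event, warn "Received unexpected event …" when no waiter was registered) follows a register-then-dispatch pattern. The sketch below is an illustrative analogue built on plain threading primitives, not Nova's InstanceEvents implementation; all names in it are hypothetical.

# Illustrative analogue of the event pop/dispatch seen above: the spawn path
# registers interest in an event, and the external-event handler either wakes
# that waiter or reports the event as unexpected.
import threading

class InstanceEventsSketch:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f'Received unexpected event {event_name} for {instance_uuid}')
        else:
            waiter.set()

# Usage: the build path calls prepare() before plugging the VIF and then
# waiter.wait(timeout=...); the Neutron notification arrives via dispatch().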
[ 1124.537818] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e051fc4-f263-4893-a113-5e35bda5c048 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.542611] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b332c56b-c769-4614-8e17-fd33cb9e51dc tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.787s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.553930] env[61545]: DEBUG nova.compute.provider_tree [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.709480] env[61545]: DEBUG nova.compute.manager [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1124.709718] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.710638] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ee8a11-593e-4707-9718-0bf5c38915b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.723018] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.723018] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d538958d-bb91-4f02-a579-f361b4f52ef3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.729356] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1124.729356] env[61545]: value = "task-4256558" [ 1124.729356] env[61545]: _type = "Task" [ 1124.729356] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.740453] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.918015] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Successfully updated port: a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.060119] env[61545]: DEBUG nova.scheduler.client.report [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.242967] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256558, 'name': PowerOffVM_Task, 'duration_secs': 0.305376} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.245801] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.245801] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.245801] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a04c3bf7-6b1b-4a12-aa83-bcd89048f442 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.311592] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.311592] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.311592] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.311592] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef14da6a-0ab6-4a85-8f69-c7831efe7919 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.321428] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1125.321428] env[61545]: value = "task-4256560" [ 1125.321428] env[61545]: _type = "Task" [ 1125.321428] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.332641] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256560, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.421368] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.422690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.423817] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1125.533308] env[61545]: DEBUG nova.compute.manager [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Received event network-changed-f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1125.533308] env[61545]: DEBUG nova.compute.manager [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Refreshing instance network info cache due to event network-changed-f18fe5c7-64c8-4f58-b7c8-806d3e03985e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1125.533825] env[61545]: DEBUG oslo_concurrency.lockutils [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] Acquiring lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.534220] env[61545]: DEBUG oslo_concurrency.lockutils [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] Acquired lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.534919] env[61545]: DEBUG nova.network.neutron [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Refreshing network info cache for port f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.565926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.565926] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1125.568080] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.182s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.568731] env[61545]: DEBUG nova.objects.instance [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'resources' on Instance uuid 79762f13-2f93-43ba-883b-9437c7732c04 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.836392] env[61545]: DEBUG oslo_vmware.api [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325374} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.836392] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.836687] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.837243] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.837243] env[61545]: INFO nova.compute.manager [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1125.837508] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.837722] env[61545]: DEBUG nova.compute.manager [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1125.837857] env[61545]: DEBUG nova.network.neutron [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.031600] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.073393] env[61545]: DEBUG nova.compute.utils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1126.078099] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1126.078631] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1126.320916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1029994e-147f-4ff6-9f33-902fc6bafcc5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.332912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15adbea9-307a-477a-b454-f4150c609893 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.366940] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b71491-15af-49f6-ac09-01c5539d5df2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.376198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cb3be5-1194-462f-9a73-40242bfe4441 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.397860] env[61545]: DEBUG nova.compute.provider_tree [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.511440] env[61545]: DEBUG nova.policy [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9e3d40cf5dc432187dbe952703e9bc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29603c76a801442ba2af0d91ab240290', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1126.579057] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.801929] env[61545]: DEBUG nova.compute.manager [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Received event network-changed-a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1126.802154] env[61545]: DEBUG nova.compute.manager [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Refreshing instance network info cache due to event network-changed-a89c03e7-6504-4eca-9dc3-110100bbf69c. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1126.804210] env[61545]: DEBUG oslo_concurrency.lockutils [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] Acquiring lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.902199] env[61545]: DEBUG nova.scheduler.client.report [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.932973] env[61545]: DEBUG nova.network.neutron [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating instance_info_cache with network_info: [{"id": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "address": "fa:16:3e:30:26:4e", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa89c03e7-65", "ovs_interfaceid": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.175092] env[61545]: DEBUG nova.network.neutron 
[req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updated VIF entry in instance network info cache for port f18fe5c7-64c8-4f58-b7c8-806d3e03985e. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1127.175323] env[61545]: DEBUG nova.network.neutron [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating instance_info_cache with network_info: [{"id": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "address": "fa:16:3e:55:97:c9", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf18fe5c7-64", "ovs_interfaceid": "f18fe5c7-64c8-4f58-b7c8-806d3e03985e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.409916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.412840] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.132s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.413287] env[61545]: DEBUG nova.objects.instance [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lazy-loading 'resources' on Instance uuid a7967300-6760-4310-bf48-00ddcaac3ee8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.434599] env[61545]: INFO nova.scheduler.client.report [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance 79762f13-2f93-43ba-883b-9437c7732c04 [ 1127.435762] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.436049] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance network_info: |[{"id": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "address": "fa:16:3e:30:26:4e", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa89c03e7-65", "ovs_interfaceid": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1127.439254] env[61545]: DEBUG oslo_concurrency.lockutils [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] Acquired lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.439467] env[61545]: DEBUG nova.network.neutron [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Refreshing network info cache for port a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1127.440470] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:26:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a89c03e7-6504-4eca-9dc3-110100bbf69c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.448201] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.449523] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.449618] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20a19649-119e-42cd-b1ec-31a02aab87b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.472281] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.472281] env[61545]: value = "task-4256561" [ 1127.472281] env[61545]: _type = "Task" [ 1127.472281] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.485951] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256561, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.491066] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Successfully created port: f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1127.591107] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.625730] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.625730] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.625730] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.625885] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.626051] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.626240] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.626459] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.626616] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.626777] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.626950] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.627137] env[61545]: DEBUG nova.virt.hardware [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.628089] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f149d4-5a49-4e94-88e2-a8dbe36b0d2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.637517] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df95330a-e207-484d-a941-735d474771c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.678078] env[61545]: DEBUG oslo_concurrency.lockutils [req-94115aff-c882-4f0e-8ae3-dabd99265ce3 req-6135d1d1-66e0-46f5-8500-5c0afa3f0764 service nova] Releasing lock "refresh_cache-c61ca5f4-78ae-4626-977d-8c17dc12c012" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.955792] env[61545]: DEBUG oslo_concurrency.lockutils [None req-14d4cf73-cd50-4f3b-b2f8-9c43a3658516 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "79762f13-2f93-43ba-883b-9437c7732c04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.945s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.981678] env[61545]: DEBUG nova.network.neutron [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.991907] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256561, 'name': CreateVM_Task, 'duration_secs': 0.368355} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.992123] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1127.995303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.995469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.995774] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1127.996248] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe73db72-71aa-4582-bbad-9946aad00bf7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.005290] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1128.005290] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52044947-6763-2787-c84e-292dc37cfaf0" [ 1128.005290] env[61545]: _type = "Task" [ 1128.005290] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.017327] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52044947-6763-2787-c84e-292dc37cfaf0, 'name': SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.017623] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.017856] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.018103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.018255] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.018437] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.018706] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97cc3f52-1b20-4125-9810-9a208b837db8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.032646] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.032851] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.036079] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83af7683-6472-49fd-b5d8-a88721338e64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.042730] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1128.042730] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1012c-c777-81cf-b33f-2942086101c6" [ 1128.042730] env[61545]: _type = "Task" [ 1128.042730] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.056531] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1012c-c777-81cf-b33f-2942086101c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.181437] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d119884-baf5-474b-907b-edc9e5398ad6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.191750] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60763c8-a2de-45e2-b1b7-5e0e657864e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.224048] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65be81c8-c5d6-43db-9ed0-bf8db3161605 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.232149] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df43a983-7146-4507-8b5f-f2c0761d5765 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.236625] env[61545]: DEBUG nova.network.neutron [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updated VIF entry in instance network info cache for port a89c03e7-6504-4eca-9dc3-110100bbf69c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.236961] env[61545]: DEBUG nova.network.neutron [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating instance_info_cache with network_info: [{"id": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "address": "fa:16:3e:30:26:4e", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa89c03e7-65", "ovs_interfaceid": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.250278] env[61545]: DEBUG nova.compute.provider_tree [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.277616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "f2975097-29a3-46cc-9dea-0c414baff246" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.277616] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.485664] env[61545]: INFO nova.compute.manager [-] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Took 2.65 seconds to deallocate network for instance. [ 1128.556090] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a1012c-c777-81cf-b33f-2942086101c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010618} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.556993] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71e93b9b-d0de-430b-beda-00abc8d70a8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.563370] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1128.563370] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52533475-b600-1a92-9f1c-ab0fdbaa1164" [ 1128.563370] env[61545]: _type = "Task" [ 1128.563370] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.572616] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52533475-b600-1a92-9f1c-ab0fdbaa1164, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.739766] env[61545]: DEBUG oslo_concurrency.lockutils [req-5196c275-fc1f-4c65-bbcd-eeeecce30c2f req-47f6c93b-c8ec-4564-b384-ddfd61b2fb7c service nova] Releasing lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.754666] env[61545]: DEBUG nova.scheduler.client.report [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.779360] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1128.853695] env[61545]: DEBUG nova.compute.manager [req-7521a91d-1e59-4c67-ae92-cfc7693971f9 req-603c88de-a873-471a-b971-de36c5095553 service nova] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Received event network-vif-deleted-7e7e6bd8-fac2-4516-af29-a249216acca6 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1128.992018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.074368] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52533475-b600-1a92-9f1c-ab0fdbaa1164, 'name': SearchDatastore_Task, 'duration_secs': 0.032231} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.075803] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.076067] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.078491] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0e882e4-9692-4bad-80a3-2cf9949eee95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.081039] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.081272] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.087684] 
env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1129.087684] env[61545]: value = "task-4256562" [ 1129.087684] env[61545]: _type = "Task" [ 1129.087684] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.098078] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.215280] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Successfully updated port: f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.260416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.847s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.263428] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.171s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.289804] env[61545]: INFO nova.scheduler.client.report [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Deleted allocations for instance a7967300-6760-4310-bf48-00ddcaac3ee8 [ 1129.303139] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.583439] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1129.597406] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491248} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.597704] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1129.597919] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1129.598206] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ecf92d3-917e-4d95-91a5-95153a411e4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.606066] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1129.606066] env[61545]: value = "task-4256563" [ 1129.606066] env[61545]: _type = "Task" [ 1129.606066] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.615824] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256563, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.717728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.717936] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquired lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.718081] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.801963] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d82ed820-6243-4a19-aae8-9c706b3e5a78 tempest-ImagesOneServerNegativeTestJSON-513524542 tempest-ImagesOneServerNegativeTestJSON-513524542-project-member] Lock "a7967300-6760-4310-bf48-00ddcaac3ee8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.154s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.106334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.116452] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078919} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.116719] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.117616] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2450738-1e81-4add-baa5-3d0dceee47de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.140457] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.140580] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95efaeb9-2903-4224-844b-0c8626f4de91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.160267] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1130.160267] env[61545]: value = "task-4256564" [ 1130.160267] env[61545]: _type = "Task" [ 1130.160267] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.168978] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.249524] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1130.291843] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.292277] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 1130.292593] env[61545]: WARNING nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f9c9c447-e676-4143-b329-fb6d71bcd553 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1130.292800] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2129a1eb-4ad7-42ef-9554-6202f7a44f58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.292980] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.293183] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c61ca5f4-78ae-4626-977d-8c17dc12c012 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.293744] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.293744] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance ab1779b4-707e-4bd8-adea-940805654e1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1130.400113] env[61545]: DEBUG nova.network.neutron [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Updating instance_info_cache with network_info: [{"id": "f890630b-67d4-4991-a8b1-b4c414add2d3", "address": "fa:16:3e:82:7d:5d", "network": {"id": "86cadc05-8b91-4008-8b1d-8e2144d96a75", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-879102591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29603c76a801442ba2af0d91ab240290", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf890630b-67", "ovs_interfaceid": "f890630b-67d4-4991-a8b1-b4c414add2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.670993] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256564, 'name': ReconfigVM_Task, 'duration_secs': 0.38215} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.671646] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to attach disk [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.672473] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b613536c-9417-4031-adc0-8efd016229d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.679791] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1130.679791] env[61545]: value = "task-4256565" [ 1130.679791] env[61545]: _type = "Task" [ 1130.679791] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.690929] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256565, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.797021] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 413f3b55-0db1-4331-b19f-5cd6c4eeb48a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1130.888217] env[61545]: DEBUG nova.compute.manager [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Received event network-vif-plugged-f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1130.888546] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Acquiring lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.888773] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.888943] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.889222] env[61545]: DEBUG nova.compute.manager [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] No waiting events found dispatching network-vif-plugged-f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1130.889411] env[61545]: WARNING nova.compute.manager [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Received unexpected event network-vif-plugged-f890630b-67d4-4991-a8b1-b4c414add2d3 for instance with vm_state building and task_state spawning. 
[ 1130.889594] env[61545]: DEBUG nova.compute.manager [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Received event network-changed-f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1130.889750] env[61545]: DEBUG nova.compute.manager [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Refreshing instance network info cache due to event network-changed-f890630b-67d4-4991-a8b1-b4c414add2d3. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1130.889958] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Acquiring lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.903035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Releasing lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.903318] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Instance network_info: |[{"id": "f890630b-67d4-4991-a8b1-b4c414add2d3", "address": "fa:16:3e:82:7d:5d", "network": {"id": "86cadc05-8b91-4008-8b1d-8e2144d96a75", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-879102591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29603c76a801442ba2af0d91ab240290", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf890630b-67", "ovs_interfaceid": "f890630b-67d4-4991-a8b1-b4c414add2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1130.903637] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Acquired lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.903817] env[61545]: DEBUG nova.network.neutron [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: 
ab1779b4-707e-4bd8-adea-940805654e1a] Refreshing network info cache for port f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.904981] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:7d:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a58387dd-f438-4913-af6a-fafb734cd881', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f890630b-67d4-4991-a8b1-b4c414add2d3', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1130.912619] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Creating folder: Project (29603c76a801442ba2af0d91ab240290). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.915976] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-075b7819-8125-4511-bf29-0afdfc377080 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.929340] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Created folder: Project (29603c76a801442ba2af0d91ab240290) in parent group-v838542. [ 1130.929617] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Creating folder: Instances. Parent ref: group-v838806. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.929881] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4af46015-922e-4712-b676-c5b4038b1465 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.942052] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Created folder: Instances in parent group-v838806. [ 1130.942368] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.942578] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1130.942787] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7461a6c0-04f3-4e3d-9c57-a9dde2cb6596 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.968435] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1130.968435] env[61545]: value = "task-4256568" [ 1130.968435] env[61545]: _type = "Task" [ 1130.968435] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.977771] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256568, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.193011] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256565, 'name': Rename_Task, 'duration_secs': 0.148294} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.194069] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.194069] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d9903da-d874-4e4d-b975-543af2a34b17 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.208130] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1131.208130] env[61545]: value = "task-4256569" [ 1131.208130] env[61545]: _type = "Task" [ 1131.208130] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.219234] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.293714] env[61545]: DEBUG nova.network.neutron [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Updated VIF entry in instance network info cache for port f890630b-67d4-4991-a8b1-b4c414add2d3. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.294326] env[61545]: DEBUG nova.network.neutron [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Updating instance_info_cache with network_info: [{"id": "f890630b-67d4-4991-a8b1-b4c414add2d3", "address": "fa:16:3e:82:7d:5d", "network": {"id": "86cadc05-8b91-4008-8b1d-8e2144d96a75", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-879102591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29603c76a801442ba2af0d91ab240290", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf890630b-67", "ovs_interfaceid": "f890630b-67d4-4991-a8b1-b4c414add2d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.302570] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e0ae4965-42eb-4286-8cd9-a5c82426f1bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1131.482170] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256568, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.720489] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256569, 'name': PowerOnVM_Task} progress is 81%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.799670] env[61545]: DEBUG oslo_concurrency.lockutils [req-a21942a6-cd41-42b4-8d4a-63ca287590b5 req-3ef0d29f-2503-4dfe-9b9d-a81e15b8578c service nova] Releasing lock "refresh_cache-ab1779b4-707e-4bd8-adea-940805654e1a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.805257] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4f713be2-4c38-413b-874d-a39a4c01a1be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1131.980838] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256568, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.220803] env[61545]: DEBUG oslo_vmware.api [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256569, 'name': PowerOnVM_Task, 'duration_secs': 0.87789} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.221112] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.221342] env[61545]: INFO nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1132.221540] env[61545]: DEBUG nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.222334] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac70cb4e-31d0-4d9f-96cb-c775ee77765b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.308663] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 3b4fd643-c536-4da9-b1a3-82cd74d24f3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.482079] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256568, 'name': CreateVM_Task, 'duration_secs': 1.342648} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.482308] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.483146] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.483402] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.483740] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1132.484019] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea783b75-1eae-476f-b798-908d20430936 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.489143] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1132.489143] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52520567-4bb7-12d7-f620-7f19ca6ada60" [ 1132.489143] env[61545]: _type = "Task" [ 1132.489143] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.499488] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52520567-4bb7-12d7-f620-7f19ca6ada60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.740069] env[61545]: INFO nova.compute.manager [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Took 19.84 seconds to build instance. [ 1132.811210] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f2975097-29a3-46cc-9dea-0c414baff246 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.000525] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52520567-4bb7-12d7-f620-7f19ca6ada60, 'name': SearchDatastore_Task, 'duration_secs': 0.033551} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.000844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.001095] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.001336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.001486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.001665] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.001938] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68202ab7-a0bb-4e69-9bca-50ba9cc43e40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.012839] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.013090] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc68928-0053-4d50-8719-81b626cc45fd 
tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1133.013758] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a075cd59-df9e-404b-b778-0a6584fe54ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.019537] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1133.019537] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52147e70-346c-e55c-d110-c6b2aaa0a498" [ 1133.019537] env[61545]: _type = "Task" [ 1133.019537] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.027607] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52147e70-346c-e55c-d110-c6b2aaa0a498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.199708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "00c4a77a-e049-4511-95c9-e4b6596490c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.199941] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.243066] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5a958a87-db07-4acf-9018-844126d7e7ac tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.353s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.314481] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance a5ec3957-4646-4de4-8eac-9f0fbbf8da52 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.314808] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1133.314972] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=250GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1133.532841] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52147e70-346c-e55c-d110-c6b2aaa0a498, 'name': SearchDatastore_Task, 'duration_secs': 0.011749} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.533805] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae88f24e-bf72-49d5-acd1-df0d37284ae6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.542876] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1133.542876] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529cf79d-cb1e-3e5d-d408-7ca4fbf2b89b" [ 1133.542876] env[61545]: _type = "Task" [ 1133.542876] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.551237] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529cf79d-cb1e-3e5d-d408-7ca4fbf2b89b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.552817] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7acba3-2453-4020-a0c2-62f9b0779d6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.560177] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf9b6ef-00c6-49d6-91f4-f2c37e630289 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.591484] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c740c2ba-9cc8-4bf3-aa70-78d561618cc9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.599690] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02beb4c4-9955-45f4-bcce-7b50d23b9ca9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.613911] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.702102] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1133.897658] env[61545]: DEBUG nova.compute.manager [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Received event network-changed-a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1133.897778] env[61545]: DEBUG nova.compute.manager [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Refreshing instance network info cache due to event network-changed-a89c03e7-6504-4eca-9dc3-110100bbf69c. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1133.898012] env[61545]: DEBUG oslo_concurrency.lockutils [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] Acquiring lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.898166] env[61545]: DEBUG oslo_concurrency.lockutils [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] Acquired lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.898328] env[61545]: DEBUG nova.network.neutron [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Refreshing network info cache for port a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.054381] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529cf79d-cb1e-3e5d-d408-7ca4fbf2b89b, 'name': SearchDatastore_Task, 'duration_secs': 0.010625} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.054695] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.054958] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ab1779b4-707e-4bd8-adea-940805654e1a/ab1779b4-707e-4bd8-adea-940805654e1a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.055231] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbcf2540-1739-4c9e-b588-35978704c685 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.062046] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1134.062046] env[61545]: value = "task-4256570" [ 1134.062046] env[61545]: _type = "Task" [ 1134.062046] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.070152] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.117413] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.233098] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.572432] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256570, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.623710] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1134.623961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.361s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.624257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.878s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.625929] env[61545]: INFO nova.compute.claims [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.720110] env[61545]: DEBUG nova.network.neutron [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updated VIF entry in instance network info cache for port a89c03e7-6504-4eca-9dc3-110100bbf69c. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.720497] env[61545]: DEBUG nova.network.neutron [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating instance_info_cache with network_info: [{"id": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "address": "fa:16:3e:30:26:4e", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa89c03e7-65", "ovs_interfaceid": "a89c03e7-6504-4eca-9dc3-110100bbf69c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.072923] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545756} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.073187] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ab1779b4-707e-4bd8-adea-940805654e1a/ab1779b4-707e-4bd8-adea-940805654e1a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.073402] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.073656] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c271ae5-4d9d-4fad-9dd0-90d378099cbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.080624] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1135.080624] env[61545]: value = "task-4256571" [ 1135.080624] env[61545]: _type = "Task" [ 1135.080624] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.091587] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256571, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.223681] env[61545]: DEBUG oslo_concurrency.lockutils [req-7fdbcb7a-f009-4cc6-b8bf-368ca77fae04 req-aa86a3a0-5698-43e9-aeed-e4c6a7d0f154 service nova] Releasing lock "refresh_cache-c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.591066] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256571, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103975} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.591363] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.592149] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea20238-b403-48bd-b33d-0d3cfe8b5439 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.614472] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] ab1779b4-707e-4bd8-adea-940805654e1a/ab1779b4-707e-4bd8-adea-940805654e1a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.614754] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5453be79-3cbe-40a5-8858-9f65094e6f55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.637687] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1135.637687] env[61545]: value = "task-4256572" [ 1135.637687] env[61545]: _type = "Task" [ 1135.637687] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.648824] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256572, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.925575] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e754b9e-d35b-437a-935c-729e25d8c66e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.933414] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f426d6b9-4bb7-4349-aedb-71da495e3201 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.964856] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f48609d-09a3-4dc2-8c5a-09b6b00cd520 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.973184] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90f6faa-3378-4d0b-a604-2bdc3d429b8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.986607] env[61545]: DEBUG nova.compute.provider_tree [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.150021] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256572, 'name': ReconfigVM_Task, 'duration_secs': 0.289055} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.150327] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Reconfigured VM instance instance-0000005e to attach disk [datastore2] ab1779b4-707e-4bd8-adea-940805654e1a/ab1779b4-707e-4bd8-adea-940805654e1a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.151009] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5165eeb-805f-40bb-83cb-8a4d639b39cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.158468] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1136.158468] env[61545]: value = "task-4256573" [ 1136.158468] env[61545]: _type = "Task" [ 1136.158468] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.166892] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256573, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.490410] env[61545]: DEBUG nova.scheduler.client.report [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.669484] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256573, 'name': Rename_Task, 'duration_secs': 0.221306} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.669786] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.670068] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0822a30-6da3-4621-8efb-7f743a9103d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.676753] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1136.676753] env[61545]: value = "task-4256574" [ 1136.676753] env[61545]: _type = "Task" [ 1136.676753] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.685171] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.996233] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.996756] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.000068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.177s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.000333] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.002641] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.901s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.004170] env[61545]: INFO nova.compute.claims [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.025807] env[61545]: INFO nova.scheduler.client.report [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Deleted allocations for instance f9c9c447-e676-4143-b329-fb6d71bcd553 [ 1137.187600] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256574, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.509751] env[61545]: DEBUG nova.compute.utils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.513068] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.513241] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.534270] env[61545]: DEBUG oslo_concurrency.lockutils [None req-998b7d34-2be1-4060-b111-feac0cca78a1 tempest-ServersNegativeTestJSON-1742660343 tempest-ServersNegativeTestJSON-1742660343-project-member] Lock "f9c9c447-e676-4143-b329-fb6d71bcd553" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.871s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.578260] env[61545]: DEBUG nova.policy [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a480cf5708dd4134b64d4700782b5e5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c29495610a5f46a39670abf9a34ca73a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1137.687769] env[61545]: DEBUG oslo_vmware.api [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256574, 'name': PowerOnVM_Task, 'duration_secs': 0.582419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.688118] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.688265] env[61545]: INFO nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Took 10.10 seconds to spawn the instance on the hypervisor. 
[ 1137.688491] env[61545]: DEBUG nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.689344] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1c0b4d-5f24-4d6f-bf33-8ea2f7d9f949 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.890673] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully created port: cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.019920] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.217687] env[61545]: INFO nova.compute.manager [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Took 21.20 seconds to build instance. [ 1138.224552] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114d7a8f-a27d-40e5-9936-037e7a2aff1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.233403] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a9c8ba-a728-4890-b2bd-1d12eab91d6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.273475] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4eac44-3d5e-4654-974d-39a61406889d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.281837] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd99acb5-6795-4472-80ed-eb561fefb67a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.297278] env[61545]: DEBUG nova.compute.provider_tree [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.526765] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully created port: 98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
1138.720511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bc68928-0053-4d50-8719-81b626cc45fd tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.715s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.800667] env[61545]: DEBUG nova.scheduler.client.report [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.984029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "ab1779b4-707e-4bd8-adea-940805654e1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.984029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.984029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.984029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.984029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.985625] env[61545]: INFO nova.compute.manager [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Terminating instance [ 1139.039455] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully created port: 9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.043821] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.071374] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.071638] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.071823] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.071974] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.072134] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.072282] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 
tempest-ServersTestMultiNic-619370263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.072523] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.072732] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.072893] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.073066] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.073243] env[61545]: DEBUG nova.virt.hardware [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.074662] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef60d1c0-5c7b-42a2-9dc8-627be2b3fb91 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.083179] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b401383-85a5-4f14-a86e-8105289dabc9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.308835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.309958] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1139.315847] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.989s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.317507] env[61545]: INFO nova.compute.claims [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1139.492844] env[61545]: DEBUG nova.compute.manager [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1139.493462] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1139.495963] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc6d63c-e70e-41a9-8132-b229cffddb48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.505154] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.505577] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdbe5ea4-b62e-4b03-b1ac-852fd7a8b901 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.515828] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1139.515828] env[61545]: value = "task-4256575" [ 1139.515828] env[61545]: _type = "Task" [ 1139.515828] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.525607] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256575, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.823958] env[61545]: DEBUG nova.compute.utils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1139.829146] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1139.829367] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1139.915175] env[61545]: DEBUG nova.policy [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c294d699abb483f9c63ffea01adf0fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e026364ae074b0b8a6a6ef4a8d841ba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1140.026099] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256575, 'name': PowerOffVM_Task, 'duration_secs': 0.200242} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.026495] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.026673] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1140.026935] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce174641-fb6c-430e-87d4-324eb1011fd4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.099176] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1140.099176] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1140.099176] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Deleting the datastore file [datastore2] ab1779b4-707e-4bd8-adea-940805654e1a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.099176] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed752b43-4aa6-4b3c-8bf1-4b8e4385bef3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.109839] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for the task: (returnval){ [ 1140.109839] env[61545]: value = "task-4256577" [ 1140.109839] env[61545]: _type = "Task" [ 1140.109839] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.121443] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256577, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.330229] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1140.402881] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Successfully created port: 39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1140.586563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09311b76-029a-432e-9e4f-b04a985af81b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.594872] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47026a69-3e69-4b15-b2bb-9826a77d1caf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.636319] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fd9c45-2174-44ba-91dd-3d8381bc9c41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.639751] env[61545]: DEBUG nova.compute.manager [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-plugged-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1140.639958] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.640183] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.640371] env[61545]: DEBUG oslo_concurrency.lockutils [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.640524] env[61545]: DEBUG nova.compute.manager [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] No waiting events found dispatching 
network-vif-plugged-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.640687] env[61545]: WARNING nova.compute.manager [req-a0a9fab4-5b43-4fac-95b2-25b6dbf4ebd6 req-952299c3-061d-47ec-81be-2631eaa03e52 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received unexpected event network-vif-plugged-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 for instance with vm_state building and task_state spawning. [ 1140.647090] env[61545]: DEBUG oslo_vmware.api [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Task: {'id': task-4256577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318123} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.649171] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.649386] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1140.649575] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1140.649750] env[61545]: INFO nova.compute.manager [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1140.649984] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.650303] env[61545]: DEBUG nova.compute.manager [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1140.650421] env[61545]: DEBUG nova.network.neutron [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1140.653277] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bab0bf3-8bc6-456c-9d4a-96a781717c44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.669920] env[61545]: DEBUG nova.compute.provider_tree [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.921800] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully updated port: cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1141.174056] env[61545]: DEBUG nova.scheduler.client.report [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.344521] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1141.373183] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1141.373490] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1141.373653] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1141.373879] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1141.374107] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1141.374277] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1141.374556] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1141.374744] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1141.374917] env[61545]: DEBUG nova.virt.hardware [None 
req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1141.375126] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1141.375331] env[61545]: DEBUG nova.virt.hardware [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1141.376235] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c08ed0-9f01-4b8e-a4f5-1c621f9541fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.384763] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d89b5a-51e7-4259-a193-0e3be6f42812 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.569268] env[61545]: DEBUG nova.network.neutron [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.681031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.681630] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1141.686361] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.526s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.689305] env[61545]: INFO nova.compute.claims [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.074866] env[61545]: INFO nova.compute.manager [-] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Took 1.42 seconds to deallocate network for instance. 
[ 1142.187603] env[61545]: DEBUG nova.compute.utils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1142.188968] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1142.189149] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1142.290192] env[61545]: DEBUG nova.policy [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab8b684068924ed3b59539fe3646b5d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6050296fc5e54934b349b54f32f4ac8e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1142.396303] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Successfully updated port: 39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.581918] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.692741] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1142.723904] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-changed-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1142.724165] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing instance network info cache due to event network-changed-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1142.724390] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Acquiring lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.724538] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Acquired lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.724888] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing network info cache for port cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.736892] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Successfully created port: 042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1142.901203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.901441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.901658] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.954435] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f947cb6-d580-46f1-8ad8-eba42447c86b {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.979129] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27371c90-b6ca-4860-a72c-821335c3da7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.022921] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de61b9b-4721-40e3-a2c0-cc901d4dc051 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.031224] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f04386-ef15-4c0a-a4ee-dba30efc52f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.047640] env[61545]: DEBUG nova.compute.provider_tree [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.246912] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.247721] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.297752] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.464687] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.491350] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully updated port: 98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1143.494694] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.551928] env[61545]: DEBUG nova.scheduler.client.report [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.686841] env[61545]: DEBUG nova.network.neutron [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating instance_info_cache with network_info: [{"id": "39d2066d-48ee-40bd-bb98-733c92c48910", "address": "fa:16:3e:e2:a0:d7", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d2066d-48", "ovs_interfaceid": "39d2066d-48ee-40bd-bb98-733c92c48910", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.702930] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1143.730832] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1143.731094] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.731258] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1143.731445] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.731593] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1143.731760] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1143.731978] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1143.732163] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1143.732325] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 
tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1143.732489] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1143.732666] env[61545]: DEBUG nova.virt.hardware [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1143.734019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378bb6a7-057c-4078-a614-804c01363ad9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.743691] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12bf550-1a9c-4885-bfc2-3bf1dacbe03c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.751538] env[61545]: DEBUG nova.compute.utils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1143.996381] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Releasing lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.996536] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Received event network-vif-deleted-f890630b-67d4-4991-a8b1-b4c414add2d3 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1143.996832] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Received event network-vif-plugged-39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1143.996910] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.997127] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.997303] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.997508] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] No waiting events found dispatching network-vif-plugged-39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1143.997695] env[61545]: WARNING nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Received unexpected event network-vif-plugged-39d2066d-48ee-40bd-bb98-733c92c48910 for instance with vm_state building and task_state spawning. [ 1143.997860] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Received event network-changed-39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1143.998041] env[61545]: DEBUG nova.compute.manager [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Refreshing instance network info cache due to event network-changed-39d2066d-48ee-40bd-bb98-733c92c48910. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1143.998216] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Acquiring lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.060388] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.061127] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1144.064386] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.073s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.064607] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.066807] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.764s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.068319] env[61545]: INFO nova.compute.claims [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.096515] env[61545]: INFO nova.scheduler.client.report [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted allocations for instance 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42 [ 1144.192025] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.192025] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Instance network_info: |[{"id": "39d2066d-48ee-40bd-bb98-733c92c48910", "address": "fa:16:3e:e2:a0:d7", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap39d2066d-48", "ovs_interfaceid": "39d2066d-48ee-40bd-bb98-733c92c48910", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1144.192025] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Acquired lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.192025] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Refreshing network info cache for port 39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.192025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:a0:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '65497291-07f3-434c-bd42-657a0cb03365', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39d2066d-48ee-40bd-bb98-733c92c48910', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.201258] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.202786] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1144.203254] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94860f58-9734-486a-ada5-b7db7a9139de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.225629] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.225629] env[61545]: value = "task-4256578" [ 1144.225629] env[61545]: _type = "Task" [ 1144.225629] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.236755] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256578, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.263649] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.572738] env[61545]: DEBUG nova.compute.utils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1144.576073] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1144.576251] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1144.606977] env[61545]: DEBUG oslo_concurrency.lockutils [None req-577994c0-728b-4795-a840-7051b9601e9d tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.409s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.629186] env[61545]: DEBUG nova.policy [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.735724] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256578, 'name': CreateVM_Task, 'duration_secs': 0.320951} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.735972] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1144.736805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.736995] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.737365] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1144.737592] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-355412ab-30aa-4ad9-a63c-c3f268444098 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.743175] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1144.743175] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f382-b766-e7d1-0863-9d0d67c55f42" [ 1144.743175] env[61545]: _type = "Task" [ 1144.743175] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.751921] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f382-b766-e7d1-0863-9d0d67c55f42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.849143] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Successfully updated port: 042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1144.925523] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-plugged-98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1144.925796] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.926661] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.926847] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.927029] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] No waiting events found dispatching network-vif-plugged-98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1144.927203] env[61545]: WARNING nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received unexpected event network-vif-plugged-98be7dc1-d53f-476a-8f23-de85f656f6b2 for instance with vm_state building and task_state spawning. [ 1144.927370] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-changed-98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1144.927521] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing instance network info cache due to event network-changed-98be7dc1-d53f-476a-8f23-de85f656f6b2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1144.927712] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Acquiring lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.927850] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Acquired lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.927996] env[61545]: DEBUG nova.network.neutron [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing network info cache for port 98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.076833] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1145.257357] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f382-b766-e7d1-0863-9d0d67c55f42, 'name': SearchDatastore_Task, 'duration_secs': 0.012751} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.257792] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.258109] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.258415] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.258624] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.258894] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1145.259273] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-914bc61f-bb18-4aef-a01e-24d164f62238 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.269646] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updated VIF entry in instance network info cache for port 39d2066d-48ee-40bd-bb98-733c92c48910. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.270080] env[61545]: DEBUG nova.network.neutron [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating instance_info_cache with network_info: [{"id": "39d2066d-48ee-40bd-bb98-733c92c48910", "address": "fa:16:3e:e2:a0:d7", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d2066d-48", "ovs_interfaceid": "39d2066d-48ee-40bd-bb98-733c92c48910", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.274320] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1145.274612] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1145.275810] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d039cd-1e4f-4b5f-b0b6-321b802af7b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.282717] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1145.282717] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cf62ff-6ea1-2caa-4c95-d6a791d2787a" [ 1145.282717] env[61545]: _type = "Task" [ 1145.282717] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.297025] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cf62ff-6ea1-2caa-4c95-d6a791d2787a, 'name': SearchDatastore_Task, 'duration_secs': 0.009815} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.297593] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4df7e14d-1da1-402a-9516-a12b961cef50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.302218] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11392e82-bcf2-41b2-952b-54edc6529524 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.311293] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2aed5e5-4b15-4aad-900d-1058565c8dcc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.315691] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1145.315691] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5243db2d-a2f0-f3ea-88d8-44ae3949a0cc" [ 1145.315691] env[61545]: _type = "Task" [ 1145.315691] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.347728] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ecb8b0-5b3e-4404-acd4-292860160af0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.355586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.355961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquired lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.356279] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.357930] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5243db2d-a2f0-f3ea-88d8-44ae3949a0cc, 'name': SearchDatastore_Task, 'duration_secs': 0.011027} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.358579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.358860] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e0ae4965-42eb-4286-8cd9-a5c82426f1bf/e0ae4965-42eb-4286-8cd9-a5c82426f1bf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.359167] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87ec9d4e-04b2-4d8e-aa0d-f6772e22ac8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.366225] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49efba1f-cadf-4b62-95fd-0cc4603cc042 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.372520] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1145.372520] env[61545]: value = "task-4256579" [ 1145.372520] env[61545]: _type = "Task" [ 1145.372520] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.385769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.387495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.387495] env[61545]: INFO nova.compute.manager [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Attaching volume 627e30aa-7351-463d-9453-98a2cb96ea31 to /dev/sdb [ 1145.388712] env[61545]: DEBUG nova.compute.provider_tree [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.396624] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256579, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.438406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c92aec-e2c8-4f37-bf24-881c9c7c229f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.446979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71abe2a8-d547-415a-8624-2572f41d23ca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.463474] env[61545]: DEBUG nova.virt.block_device [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating existing volume attachment record: b4a28434-7c55-4b42-94ac-a77cd7fc49e9 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1145.520438] env[61545]: DEBUG nova.network.neutron [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance cache missing network info.
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1145.539934] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Successfully created port: c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1145.777115] env[61545]: DEBUG oslo_concurrency.lockutils [req-d8f11681-d9ea-4c46-a9b7-e2a25d695ffe req-88fcff30-e1d8-4b0b-9a2f-28f33f0d6f41 service nova] Releasing lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.883448] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256579, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.892604] env[61545]: DEBUG nova.scheduler.client.report [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.005468] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1146.029521] env[61545]: DEBUG nova.network.neutron [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.090021] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1146.113043] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1146.113783] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1146.114200] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1146.114508] 
env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1146.114787] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1146.115218] env[61545]: DEBUG nova.virt.hardware [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1146.116567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ad83ca-dfd7-433c-8abd-3cb9d77823c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.133076] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e6c0c4-38cc-43b5-a908-b5bbde0c3c9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.391012] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256579, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542058} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.391696] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e0ae4965-42eb-4286-8cd9-a5c82426f1bf/e0ae4965-42eb-4286-8cd9-a5c82426f1bf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1146.392345] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1146.392805] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01fa72c2-71ad-4b26-a503-0ff85fcc0ec5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.398381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.400257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.294s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.402619] env[61545]: INFO nova.compute.claims [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.407964] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1146.407964] env[61545]: value = "task-4256583" [ 1146.407964] env[61545]: _type = "Task" [ 1146.407964] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.418956] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256583, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.530840] env[61545]: DEBUG nova.network.neutron [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Updating instance_info_cache with network_info: [{"id": "042733f1-0011-46e6-b88b-bc359a44bca2", "address": "fa:16:3e:76:27:05", "network": {"id": "1efe3d88-b4a2-4256-901f-480e37939619", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1665300742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6050296fc5e54934b349b54f32f4ac8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap042733f1-00", "ovs_interfaceid": "042733f1-0011-46e6-b88b-bc359a44bca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.534902] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Releasing lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.535366] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Received event network-vif-plugged-042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1146.535672] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Acquiring lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.535985] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.536278] env[61545]: DEBUG oslo_concurrency.lockutils [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.536625] env[61545]: DEBUG nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] No waiting events found dispatching network-vif-plugged-042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.536910] env[61545]: WARNING nova.compute.manager [req-c2957cbe-95ec-4573-b127-bed1707603f8 req-ce0ec9da-3282-40f3-97c6-cf9a11f79998 service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Received unexpected event network-vif-plugged-042733f1-0011-46e6-b88b-bc359a44bca2 for instance with vm_state building and task_state spawning. [ 1146.681532] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Successfully updated port: 9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.907184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "b3a2dfb7-8faf-46f8-a8db-b40eece77fb8" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.907184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "b3a2dfb7-8faf-46f8-a8db-b40eece77fb8" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.924713] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256583, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070399} completed successfully.
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.924939] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.925745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c870be-0598-4783-a3ba-a1b66d342f5a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.949608] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] e0ae4965-42eb-4286-8cd9-a5c82426f1bf/e0ae4965-42eb-4286-8cd9-a5c82426f1bf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.950107] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1704eec8-7b9c-4250-9f0b-3d139e1eeb73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.971374] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1146.971374] env[61545]: value = "task-4256584" [ 1146.971374] env[61545]: _type = "Task" [ 1146.971374] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.981030] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.007609] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Received event network-changed-042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1147.007851] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Refreshing instance network info cache due to event network-changed-042733f1-0011-46e6-b88b-bc359a44bca2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1147.007991] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Acquiring lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.032139] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Releasing lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.032482] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Instance network_info: |[{"id": "042733f1-0011-46e6-b88b-bc359a44bca2", "address": "fa:16:3e:76:27:05", "network": {"id": "1efe3d88-b4a2-4256-901f-480e37939619", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1665300742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6050296fc5e54934b349b54f32f4ac8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap042733f1-00", "ovs_interfaceid": "042733f1-0011-46e6-b88b-bc359a44bca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1147.032775] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Acquired lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.032952] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Refreshing network info cache for port 042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.034641] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:27:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f256cfee-512d-4192-9aca-6750fdb1cd4c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '042733f1-0011-46e6-b88b-bc359a44bca2', 
'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1147.043256] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Creating folder: Project (6050296fc5e54934b349b54f32f4ac8e). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1147.046606] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd384774-a528-401b-89e0-af059050e9b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.059331] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Created folder: Project (6050296fc5e54934b349b54f32f4ac8e) in parent group-v838542. [ 1147.059931] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Creating folder: Instances. Parent ref: group-v838812. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1147.060084] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22ba7de2-9673-4175-b332-58a5dd0c24e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.073065] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Created folder: Instances in parent group-v838812. [ 1147.073435] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1147.073662] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1147.073884] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf9587c4-7e0d-4a8b-8539-89ff107b11cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.096901] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1147.096901] env[61545]: value = "task-4256587" [ 1147.096901] env[61545]: _type = "Task" [ 1147.096901] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.105683] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256587, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.183436] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.183567] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.183869] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.285906] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Updated VIF entry in instance network info cache for port 042733f1-0011-46e6-b88b-bc359a44bca2. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1147.286545] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Updating instance_info_cache with network_info: [{"id": "042733f1-0011-46e6-b88b-bc359a44bca2", "address": "fa:16:3e:76:27:05", "network": {"id": "1efe3d88-b4a2-4256-901f-480e37939619", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1665300742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6050296fc5e54934b349b54f32f4ac8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f256cfee-512d-4192-9aca-6750fdb1cd4c", "external-id": "nsx-vlan-transportzone-821", "segmentation_id": 821, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap042733f1-00", "ovs_interfaceid": "042733f1-0011-46e6-b88b-bc359a44bca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.413186] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "b3a2dfb7-8faf-46f8-a8db-b40eece77fb8" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.506s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.413766] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1147.495039] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256584, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.495620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.495866] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.610760] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256587, 'name': CreateVM_Task} progress is 25%.
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.645947] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1ef40f-5de9-47b9-bed7-3eea6e1f2973 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.654378] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd43ce6-c025-4969-a427-828787b8ba40 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.692122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b50cbd-8a15-4e89-bd18-d4d9380f74c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.700968] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bb0b22-0322-4f23-9852-b2ae38fddc57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.717373] env[61545]: DEBUG nova.compute.provider_tree [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.735940] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.789857] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Releasing lock "refresh_cache-4f713be2-4c38-413b-874d-a39a4c01a1be" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.790205] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-plugged-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1147.790493] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.790773] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.790950] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.791143] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] No waiting events found dispatching network-vif-plugged-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1147.791315] env[61545]: WARNING nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received unexpected event network-vif-plugged-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a for instance with vm_state building and task_state spawning. [ 1147.791487] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-changed-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1147.791643] env[61545]: DEBUG nova.compute.manager [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing instance network info cache due to event network-changed-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a.
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1147.791832] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Acquiring lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.919892] env[61545]: DEBUG nova.compute.utils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.921294] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1147.921464] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.982625] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256584, 'name': ReconfigVM_Task, 'duration_secs': 0.805355} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.983140] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfigured VM instance instance-00000060 to attach disk [datastore2] e0ae4965-42eb-4286-8cd9-a5c82426f1bf/e0ae4965-42eb-4286-8cd9-a5c82426f1bf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1147.983814] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3b23ed6-4cfb-4534-8fb0-3a1e7dc422b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.991326] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1147.991326] env[61545]: value = "task-4256588" [ 1147.991326] env[61545]: _type = "Task" [ 1147.991326] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.001229] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1148.006023] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256588, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.080434] env[61545]: DEBUG nova.policy [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71edaf52e9f74fbf8c5bd957471f3f22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd765e0f1dd914baf8b3b5f82780d9f16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1148.109145] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256587, 'name': CreateVM_Task, 'duration_secs': 0.651268} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.109333] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1148.110088] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.110262] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.110598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1148.110874] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22bf955e-863c-4d22-8cc4-1698a03c0898 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.116866] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1148.116866] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ef543-d3d1-d275-8bab-0f4571af4b39" [ 1148.116866] env[61545]: _type = "Task" [ 1148.116866] env[61545]: } to 
complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.126201] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ef543-d3d1-d275-8bab-0f4571af4b39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.222381] env[61545]: DEBUG nova.scheduler.client.report [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.425689] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1148.504872] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256588, 'name': Rename_Task, 'duration_secs': 0.145249} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.508073] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.513843] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8baa314-a618-404f-a056-3eb047474e17 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.522021] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1148.522021] env[61545]: value = "task-4256590" [ 1148.522021] env[61545]: _type = "Task" [ 1148.522021] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.531762] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.532960] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.630057] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525ef543-d3d1-d275-8bab-0f4571af4b39, 'name': SearchDatastore_Task, 'duration_secs': 0.010913} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.630510] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.630994] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1148.631637] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.631889] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.632207] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1148.632575] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5e7709b-5a5d-4f81-8ced-89c7ecc12dca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.645437] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1148.645437] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1148.645693] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bcbb30e-adc8-4a22-8ef9-0eac09701787 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.654583] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1148.654583] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523551f5-ab45-175c-5aaa-4f3155ceb1b9" [ 1148.654583] env[61545]: _type = "Task" [ 1148.654583] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.666755] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523551f5-ab45-175c-5aaa-4f3155ceb1b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.731021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.731021] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1148.732074] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.499s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.733566] env[61545]: INFO nova.compute.claims [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1148.816688] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Successfully updated port: c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.939350] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Successfully created port: 750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.942966] env[61545]: DEBUG nova.network.neutron [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [{"id": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "address": "fa:16:3e:0a:36:b9", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbeccc2f-cf", "ovs_interfaceid": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "address": "fa:16:3e:79:cb:0d", "network": {"id": "21ac6a94-51de-4b05-9841-f6bed1d33849", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666320482", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98be7dc1-d5", "ovs_interfaceid": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "address": "fa:16:3e:e9:78:07", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dcaf9ff-46", "ovs_interfaceid": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.032822] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256590, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.165750] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523551f5-ab45-175c-5aaa-4f3155ceb1b9, 'name': SearchDatastore_Task, 'duration_secs': 0.022199} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.166611] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a276e5-5dc6-43cd-bb5c-fd4acfcbe06b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.172679] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1149.172679] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529d26df-f42f-b905-77e5-f7a41c17b95b" [ 1149.172679] env[61545]: _type = "Task" [ 1149.172679] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.180931] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529d26df-f42f-b905-77e5-f7a41c17b95b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.238272] env[61545]: DEBUG nova.compute.utils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1149.241910] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1149.242110] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.322629] env[61545]: DEBUG nova.policy [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1149.323581] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.324204] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.324494] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.375783] env[61545]: DEBUG nova.compute.manager [req-9eee9764-c98c-4e72-8464-daa66245718c 
req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-plugged-c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1149.376357] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.376792] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.377309] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.377841] env[61545]: DEBUG nova.compute.manager [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] No waiting events found dispatching network-vif-plugged-c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.378291] env[61545]: WARNING nova.compute.manager [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received unexpected event network-vif-plugged-c279f08f-d443-4a8b-bd37-296ed181c6a7 for instance with vm_state building and task_state spawning. [ 1149.378681] env[61545]: DEBUG nova.compute.manager [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-changed-c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1149.379087] env[61545]: DEBUG nova.compute.manager [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing instance network info cache due to event network-changed-c279f08f-d443-4a8b-bd37-296ed181c6a7. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1149.379495] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.437470] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1149.448411] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.448411] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance network_info: |[{"id": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "address": "fa:16:3e:0a:36:b9", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbeccc2f-cf", "ovs_interfaceid": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "address": "fa:16:3e:79:cb:0d", "network": {"id": "21ac6a94-51de-4b05-9841-f6bed1d33849", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666320482", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98be7dc1-d5", 
"ovs_interfaceid": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "address": "fa:16:3e:e9:78:07", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dcaf9ff-46", "ovs_interfaceid": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1149.448411] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Acquired lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.448411] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Refreshing network info cache for port 9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.450512] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:36:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd96b39f-bd2e-48d1-85c3-577cf97f08c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:cb:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98be7dc1-d53f-476a-8f23-de85f656f6b2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:78:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd96b39f-bd2e-48d1-85c3-577cf97f08c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9dcaf9ff-4661-4e01-8e12-31e081cb7c9a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.468675] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 
tempest-ServersTestMultiNic-619370263-project-member] Creating folder: Project (c29495610a5f46a39670abf9a34ca73a). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1149.477551] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f74242f-316c-47c4-bf8d-0543af5fa9d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.486499] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.486813] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.486971] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.487177] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.487327] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.487483] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.487804] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.487975] 
env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.488164] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.488339] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.488512] env[61545]: DEBUG nova.virt.hardware [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.489774] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ae967-9887-4f47-849d-294bb95ea163 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.494342] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Created folder: Project (c29495610a5f46a39670abf9a34ca73a) in parent group-v838542. [ 1149.494618] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Creating folder: Instances. Parent ref: group-v838815. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1149.495251] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc3050dc-ee9c-4ea2-8517-55bf8fd609b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.501791] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ea7692-14e3-41f8-84eb-5c772033a3e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.510031] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Created folder: Instances in parent group-v838815. [ 1149.510305] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.521051] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.522320] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08dafd71-d76f-4905-b873-3aa62bdfea89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.558122] env[61545]: DEBUG oslo_vmware.api [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256590, 'name': PowerOnVM_Task, 'duration_secs': 0.535221} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.559869] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.560106] env[61545]: INFO nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1149.560290] env[61545]: DEBUG nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1149.560615] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.560615] env[61545]: value = "task-4256593" [ 1149.560615] env[61545]: _type = "Task" [ 1149.560615] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.561414] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d7fbd9-6ee0-40fb-943d-b93800bc95be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.577831] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256593, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.686142] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529d26df-f42f-b905-77e5-f7a41c17b95b, 'name': SearchDatastore_Task, 'duration_secs': 0.010383} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.686426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.686746] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4f713be2-4c38-413b-874d-a39a4c01a1be/4f713be2-4c38-413b-874d-a39a4c01a1be.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1149.687043] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e31ee38-7565-4ac1-be9a-ebca5d8d1cf3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.696075] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1149.696075] env[61545]: value = "task-4256594" [ 1149.696075] env[61545]: _type = "Task" [ 1149.696075] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.710317] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.742397] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1149.935547] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.021693] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdba85fe-60a0-42f4-bb1c-a151944969ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.032860] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64748416-1708-4589-9906-c79c923e458d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.074034] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c533a969-0bcb-4bf9-aaa2-af7244c81dd2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.100032] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256593, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.100032] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db7fd9a-9765-4c49-ad00-acab811c1f1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.105813] env[61545]: INFO nova.compute.manager [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Took 29.04 seconds to build instance. [ 1150.122227] env[61545]: DEBUG nova.compute.provider_tree [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.125594] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updated VIF entry in instance network info cache for port 9dcaf9ff-4661-4e01-8e12-31e081cb7c9a. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.126090] env[61545]: DEBUG nova.network.neutron [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [{"id": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "address": "fa:16:3e:0a:36:b9", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbeccc2f-cf", "ovs_interfaceid": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "address": "fa:16:3e:79:cb:0d", "network": {"id": "21ac6a94-51de-4b05-9841-f6bed1d33849", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666320482", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98be7dc1-d5", "ovs_interfaceid": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "address": "fa:16:3e:e9:78:07", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap9dcaf9ff-46", "ovs_interfaceid": "9dcaf9ff-4661-4e01-8e12-31e081cb7c9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.208876] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256594, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.210572] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Successfully created port: 7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.370901] env[61545]: DEBUG nova.network.neutron [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.517599] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1150.517863] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838811', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'name': 'volume-627e30aa-7351-463d-9453-98a2cb96ea31', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2129a1eb-4ad7-42ef-9554-6202f7a44f58', 'attached_at': '', 'detached_at': '', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'serial': '627e30aa-7351-463d-9453-98a2cb96ea31'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1150.518751] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d116c04-3311-4d73-b6a2-5eafe1e36916 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.537006] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3eaed4-4876-4c01-8abb-d9f7ac11fa06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.565649] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-627e30aa-7351-463d-9453-98a2cb96ea31/volume-627e30aa-7351-463d-9453-98a2cb96ea31.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.565969] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfdcb7d3-1913-4d49-a1cd-1fe364963ca8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.592590] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256593, 'name': CreateVM_Task, 'duration_secs': 0.617717} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.594602] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1150.595519] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1150.595519] env[61545]: value = "task-4256595" [ 1150.595519] env[61545]: _type = "Task" [ 1150.595519] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.596425] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.596753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.597137] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1150.598237] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149a7732-ea1b-4ffd-8ae0-6e17324f69af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.607390] env[61545]: DEBUG oslo_concurrency.lockutils [None req-cff2dc68-4c65-4144-baaf-dcff358481b6 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.577s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.611863] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1150.611863] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259d23a-d1be-2b57-d4bf-3ac0261605c2" [ 1150.611863] env[61545]: _type = "Task" [ 1150.611863] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.615811] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256595, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.626440] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259d23a-d1be-2b57-d4bf-3ac0261605c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.629740] env[61545]: DEBUG nova.scheduler.client.report [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.633582] env[61545]: DEBUG oslo_concurrency.lockutils [req-6044a01d-f7c0-4d0b-a0ec-2cd3be331682 req-b2fa3a7b-9fb4-4e52-a673-56788d42e1cd service nova] Releasing lock "refresh_cache-413f3b55-0db1-4331-b19f-5cd6c4eeb48a" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.710139] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52589} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.710422] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 4f713be2-4c38-413b-874d-a39a4c01a1be/4f713be2-4c38-413b-874d-a39a4c01a1be.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1150.710668] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1150.711075] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56a5e7f7-1106-40b2-b820-179441fc8519 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.718988] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1150.718988] env[61545]: value = "task-4256596" [ 1150.718988] env[61545]: _type = "Task" [ 1150.718988] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.728715] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256596, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.758698] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1150.798463] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1150.798800] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1150.798997] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1150.800393] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1150.800828] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1150.801227] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1150.802074] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1150.802366] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1150.802597] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1150.802845] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1150.803094] env[61545]: DEBUG nova.virt.hardware [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1150.805161] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8306eb0-e0d6-4bed-b07f-d679e8c3d530 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.816913] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6df85dc-223c-47ca-94bb-2b988d5eeb62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.873494] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.873858] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Instance network_info: |[{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": 
"c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1150.874197] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.874379] env[61545]: DEBUG nova.network.neutron [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing network info cache for port c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.875741] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:9c:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c279f08f-d443-4a8b-bd37-296ed181c6a7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.885142] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1150.885729] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1150.886171] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbee45eb-0926-47d0-a676-cd1a195a6b0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.911950] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.911950] env[61545]: value = "task-4256597" [ 1150.911950] env[61545]: _type = "Task" [ 1150.911950] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.921184] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256597, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.110109] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256595, 'name': ReconfigVM_Task, 'duration_secs': 0.479738} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.110109] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-627e30aa-7351-463d-9453-98a2cb96ea31/volume-627e30aa-7351-463d-9453-98a2cb96ea31.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.114643] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8024a075-c81e-4f35-a9fd-6230b024567f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.133965] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5259d23a-d1be-2b57-d4bf-3ac0261605c2, 'name': SearchDatastore_Task, 'duration_secs': 0.021227} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.135366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.135642] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.137655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.137655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.137655] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.137655] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 
tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.137655] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1151.140039] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1151.140039] env[61545]: value = "task-4256598" [ 1151.140039] env[61545]: _type = "Task" [ 1151.140039] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.143945] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0116491c-8f8f-4b4f-8f5e-e1512a8c796d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.143945] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.561s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.143945] env[61545]: DEBUG nova.objects.instance [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lazy-loading 'resources' on Instance uuid ab1779b4-707e-4bd8-adea-940805654e1a {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.154078] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256598, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.158028] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1151.158322] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1151.159452] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0c49e79-0564-479e-9811-db2b3ef03bea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.166159] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1151.166159] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52be2c91-f4d8-4ebf-bf35-6b4bc185cd22" [ 1151.166159] env[61545]: _type = "Task" [ 1151.166159] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.178017] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52be2c91-f4d8-4ebf-bf35-6b4bc185cd22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.231203] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072729} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.231648] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.233109] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f0f92a-6dd3-4653-922b-dc094ad8bb8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.261763] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 4f713be2-4c38-413b-874d-a39a4c01a1be/4f713be2-4c38-413b-874d-a39a4c01a1be.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.263397] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f35c0d15-e9c4-44cd-b732-b2ad2d64a25b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.281573] env[61545]: DEBUG nova.compute.manager [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Received event network-changed-39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1151.281761] env[61545]: DEBUG nova.compute.manager 
[req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Refreshing instance network info cache due to event network-changed-39d2066d-48ee-40bd-bb98-733c92c48910. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1151.282061] env[61545]: DEBUG oslo_concurrency.lockutils [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] Acquiring lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.282152] env[61545]: DEBUG oslo_concurrency.lockutils [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] Acquired lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.282313] env[61545]: DEBUG nova.network.neutron [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Refreshing network info cache for port 39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.293211] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1151.293211] env[61545]: value = "task-4256599" [ 1151.293211] env[61545]: _type = "Task" [ 1151.293211] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.304812] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256599, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.355882] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Successfully updated port: 750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.403165] env[61545]: DEBUG nova.compute.manager [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Received event network-vif-plugged-750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1151.403453] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Acquiring lock "f2975097-29a3-46cc-9dea-0c414baff246-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.403597] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Lock "f2975097-29a3-46cc-9dea-0c414baff246-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.403764] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Lock "f2975097-29a3-46cc-9dea-0c414baff246-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.403928] env[61545]: DEBUG nova.compute.manager [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] No waiting events found dispatching network-vif-plugged-750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1151.404133] env[61545]: WARNING nova.compute.manager [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Received unexpected event network-vif-plugged-750ea46d-7298-43ac-a56d-08093fc56a42 for instance with vm_state building and task_state spawning. [ 1151.404270] env[61545]: DEBUG nova.compute.manager [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Received event network-changed-750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1151.404422] env[61545]: DEBUG nova.compute.manager [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Refreshing instance network info cache due to event network-changed-750ea46d-7298-43ac-a56d-08093fc56a42. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1151.404605] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Acquiring lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.404745] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Acquired lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.404898] env[61545]: DEBUG nova.network.neutron [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Refreshing network info cache for port 750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.422845] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256597, 'name': CreateVM_Task, 'duration_secs': 0.447969} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.423019] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.423726] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.423895] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.424234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.426898] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55fd734b-afd4-4492-af25-5644712677be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.433613] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1151.433613] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52557d34-6f1a-ecab-cc52-c0f3e7c1ebbd" [ 1151.433613] env[61545]: _type = "Task" [ 1151.433613] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.442070] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52557d34-6f1a-ecab-cc52-c0f3e7c1ebbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.642802] env[61545]: DEBUG nova.compute.utils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1151.644654] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1151.662104] env[61545]: DEBUG oslo_vmware.api [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256598, 'name': ReconfigVM_Task, 'duration_secs': 0.218007} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.666108] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838811', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'name': 'volume-627e30aa-7351-463d-9453-98a2cb96ea31', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2129a1eb-4ad7-42ef-9554-6202f7a44f58', 'attached_at': '', 'detached_at': '', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'serial': '627e30aa-7351-463d-9453-98a2cb96ea31'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1151.668956] env[61545]: DEBUG nova.network.neutron [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updated VIF entry in instance network info cache for port c279f08f-d443-4a8b-bd37-296ed181c6a7. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.669412] env[61545]: DEBUG nova.network.neutron [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.689828] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52be2c91-f4d8-4ebf-bf35-6b4bc185cd22, 'name': SearchDatastore_Task, 'duration_secs': 0.014226} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.691501] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f61f30c-54d5-4924-8919-c1bcf40d463f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.699206] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1151.699206] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52840652-ddcb-c190-b4e8-e3e8d93e89d9" [ 1151.699206] env[61545]: _type = "Task" [ 1151.699206] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.716598] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52840652-ddcb-c190-b4e8-e3e8d93e89d9, 'name': SearchDatastore_Task, 'duration_secs': 0.011003} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.716738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.717047] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 413f3b55-0db1-4331-b19f-5cd6c4eeb48a/413f3b55-0db1-4331-b19f-5cd6c4eeb48a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.717334] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c0ae538-02fb-4585-8bdf-933a7249d795 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.729731] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1151.729731] env[61545]: value = "task-4256600" [ 1151.729731] env[61545]: _type = "Task" [ 1151.729731] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.735410] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256600, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.809079] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256599, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.857630] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.904988] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2237c3f8-970e-41f1-9b65-189cdb90995d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.918563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c257c4be-9140-4b60-94ed-09a4972d4d0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.959079] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e59d061-cd01-4965-aaab-4e30cd01e3dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.971426] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7e9ff0-502f-4061-baef-394d3220c5fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.975701] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52557d34-6f1a-ecab-cc52-c0f3e7c1ebbd, 'name': SearchDatastore_Task, 'duration_secs': 0.016133} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.976738] env[61545]: DEBUG nova.network.neutron [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1151.978772] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.978960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.979304] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.980229] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.980229] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.980450] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad157957-c9ef-4a67-b6e7-97067d65ed3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.996180] env[61545]: DEBUG nova.compute.provider_tree [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.014527] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.014831] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.016451] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1dd115d-2396-44b5-bc02-8d3dd2f9f619 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.026366] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1152.026366] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ff83cb-17ef-7733-e7dd-f40d1d886158" [ 1152.026366] env[61545]: _type = "Task" [ 1152.026366] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.036798] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ff83cb-17ef-7733-e7dd-f40d1d886158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.133479] env[61545]: DEBUG nova.network.neutron [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.147317] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1152.178721] env[61545]: DEBUG oslo_concurrency.lockutils [req-9eee9764-c98c-4e72-8464-daa66245718c req-c24fef0d-6fd1-4054-9a78-abe0c90e9297 service nova] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.192365] env[61545]: DEBUG nova.network.neutron [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updated VIF entry in instance network info cache for port 39d2066d-48ee-40bd-bb98-733c92c48910. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.192729] env[61545]: DEBUG nova.network.neutron [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating instance_info_cache with network_info: [{"id": "39d2066d-48ee-40bd-bb98-733c92c48910", "address": "fa:16:3e:e2:a0:d7", "network": {"id": "5b2240ec-40ff-457d-b473-f03f3ccb432d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1839058245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e026364ae074b0b8a6a6ef4a8d841ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "65497291-07f3-434c-bd42-657a0cb03365", "external-id": "nsx-vlan-transportzone-279", "segmentation_id": 279, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d2066d-48", "ovs_interfaceid": "39d2066d-48ee-40bd-bb98-733c92c48910", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.236675] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256600, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.310347] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256599, 'name': ReconfigVM_Task, 'duration_secs': 0.546487} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.310672] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 4f713be2-4c38-413b-874d-a39a4c01a1be/4f713be2-4c38-413b-874d-a39a4c01a1be.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.311386] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30daf963-ae9c-4aab-b10d-1ec30bea2bae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.320223] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1152.320223] env[61545]: value = "task-4256601" [ 1152.320223] env[61545]: _type = "Task" [ 1152.320223] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.333593] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256601, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.500564] env[61545]: DEBUG nova.scheduler.client.report [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.538948] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ff83cb-17ef-7733-e7dd-f40d1d886158, 'name': SearchDatastore_Task, 'duration_secs': 0.073714} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.539872] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-975e38fa-4829-453b-bb94-d41a466adc46 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.546934] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1152.546934] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ebe7a6-8a77-5d84-2b19-200362943b8a" [ 1152.546934] env[61545]: _type = "Task" [ 1152.546934] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.555054] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ebe7a6-8a77-5d84-2b19-200362943b8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.638018] env[61545]: DEBUG oslo_concurrency.lockutils [req-f2983394-e53b-4152-97a6-2c25ff1ec79d req-530f96fd-2fb2-4588-bc43-35cc2f6a75d7 service nova] Releasing lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.638018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquired lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.638018] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.695603] env[61545]: DEBUG oslo_concurrency.lockutils [req-f4c6818a-96a0-44df-84fb-7d14a7de0c96 req-c7a6a696-aabb-4f1f-88da-8c96c0cf1778 service nova] Releasing lock "refresh_cache-e0ae4965-42eb-4286-8cd9-a5c82426f1bf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.727947] env[61545]: DEBUG nova.objects.instance [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'flavor' on Instance uuid 2129a1eb-4ad7-42ef-9554-6202f7a44f58 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.743726] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Successfully updated port: 7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1152.758466] env[61545]: DEBUG oslo_vmware.api [None 
req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256600, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59381} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.761365] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 413f3b55-0db1-4331-b19f-5cd6c4eeb48a/413f3b55-0db1-4331-b19f-5cd6c4eeb48a.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1152.764219] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1152.765564] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf0bde1b-5459-4aae-bff7-c02cf8ddce96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.778674] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1152.778674] env[61545]: value = "task-4256602" [ 1152.778674] env[61545]: _type = "Task" [ 1152.778674] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.791035] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.837966] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256601, 'name': Rename_Task, 'duration_secs': 0.171335} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.838843] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1152.838843] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adbc46ee-c058-45a5-b2ca-e257ff97dc07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.848523] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1152.848523] env[61545]: value = "task-4256603" [ 1152.848523] env[61545]: _type = "Task" [ 1152.848523] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.859342] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.010356] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.012879] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.480s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.014517] env[61545]: INFO nova.compute.claims [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.031527] env[61545]: INFO nova.scheduler.client.report [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Deleted allocations for instance ab1779b4-707e-4bd8-adea-940805654e1a [ 1153.056818] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ebe7a6-8a77-5d84-2b19-200362943b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010998} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.057125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.057449] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 3b4fd643-c536-4da9-b1a3-82cd74d24f3e/3b4fd643-c536-4da9-b1a3-82cd74d24f3e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.058451] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30182a52-092a-4b1c-8269-abb43bba2035 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.066564] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1153.066564] env[61545]: value = "task-4256604" [ 1153.066564] env[61545]: _type = "Task" [ 1153.066564] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.076174] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.158678] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1153.175706] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1153.191266] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1153.191526] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.191680] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1153.191881] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.192138] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1153.192308] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1153.192528] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1153.192690] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1153.192856] env[61545]: DEBUG nova.virt.hardware [None 
req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1153.193027] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1153.193212] env[61545]: DEBUG nova.virt.hardware [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1153.194162] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7129efee-fdb1-4d94-b8b4-34fd86e207ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.205274] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3344513c-4653-40e9-a26d-745424a72914 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.222830] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.228517] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Creating folder: Project (b87a4f6cfa7b47e28cc87fa0c8a2af8f). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.231285] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3f908bb-79ce-4b71-8c55-43697bde5bea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.235535] env[61545]: DEBUG oslo_concurrency.lockutils [None req-1952d476-ca7d-4d93-863c-7715f733b9ed tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.849s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.247753] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.247905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.248079] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1153.249394] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Created folder: Project (b87a4f6cfa7b47e28cc87fa0c8a2af8f) in parent group-v838542. [ 1153.249674] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Creating folder: Instances. Parent ref: group-v838819. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.249957] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4018a493-7c7e-4403-bc66-5196d0aa5cae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.263651] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Created folder: Instances in parent group-v838819. [ 1153.263880] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.264593] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.264832] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a8596ba-cb2b-4f19-bf76-2b5467d81d82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.288205] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.288205] env[61545]: value = "task-4256607" [ 1153.288205] env[61545]: _type = "Task" [ 1153.288205] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.298055] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081241} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.298974] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1153.299955] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cd32d3-1496-4f8a-adc3-08a7772c01c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.307035] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256607, 'name': CreateVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.333671] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 413f3b55-0db1-4331-b19f-5cd6c4eeb48a/413f3b55-0db1-4331-b19f-5cd6c4eeb48a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1153.336459] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f34a245d-e044-43f7-a2aa-9c53303a6c54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.361830] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256603, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.363547] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1153.363547] env[61545]: value = "task-4256608" [ 1153.363547] env[61545]: _type = "Task" [ 1153.363547] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.373812] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.390405] env[61545]: DEBUG nova.network.neutron [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Updating instance_info_cache with network_info: [{"id": "750ea46d-7298-43ac-a56d-08093fc56a42", "address": "fa:16:3e:e8:0a:ad", "network": {"id": "1cd37358-b134-468c-bdf6-70081c63389f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1728909090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d765e0f1dd914baf8b3b5f82780d9f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap750ea46d-72", "ovs_interfaceid": "750ea46d-7298-43ac-a56d-08093fc56a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.453080] env[61545]: DEBUG nova.compute.manager [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Received event network-vif-plugged-7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1153.453257] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Acquiring lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.453523] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.453864] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.454162] env[61545]: DEBUG nova.compute.manager [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] No waiting events found dispatching network-vif-plugged-7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.454475] env[61545]: WARNING nova.compute.manager [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Received unexpected event network-vif-plugged-7a524b94-5ca7-497a-8066-258b28fe2992 for instance with vm_state building and task_state spawning. [ 1153.454764] env[61545]: DEBUG nova.compute.manager [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Received event network-changed-7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1153.455047] env[61545]: DEBUG nova.compute.manager [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Refreshing instance network info cache due to event network-changed-7a524b94-5ca7-497a-8066-258b28fe2992. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1153.455451] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Acquiring lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.540245] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a24d8861-17f4-4856-97e9-7861c0324ccb tempest-ServerAddressesNegativeTestJSON-735079992 tempest-ServerAddressesNegativeTestJSON-735079992-project-member] Lock "ab1779b4-707e-4bd8-adea-940805654e1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.558s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.580479] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256604, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.786910] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1153.798859] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256607, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.863271] env[61545]: DEBUG oslo_vmware.api [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256603, 'name': PowerOnVM_Task, 'duration_secs': 0.608815} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.863593] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1153.863820] env[61545]: INFO nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Took 10.16 seconds to spawn the instance on the hypervisor. [ 1153.864055] env[61545]: DEBUG nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.864974] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9830fc71-126b-4cda-bd47-10cbfa180c87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.879996] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256608, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.893594] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Releasing lock "refresh_cache-f2975097-29a3-46cc-9dea-0c414baff246" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.893878] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance network_info: |[{"id": "750ea46d-7298-43ac-a56d-08093fc56a42", "address": "fa:16:3e:e8:0a:ad", "network": {"id": "1cd37358-b134-468c-bdf6-70081c63389f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1728909090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d765e0f1dd914baf8b3b5f82780d9f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap750ea46d-72", "ovs_interfaceid": "750ea46d-7298-43ac-a56d-08093fc56a42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1153.894425] env[61545]: DEBUG nova.compute.manager [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1153.897387] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:0a:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '750ea46d-7298-43ac-a56d-08093fc56a42', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.905021] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Creating folder: Project (d765e0f1dd914baf8b3b5f82780d9f16). Parent ref: group-v838542. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.905541] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75d8cf83-0e84-4e37-a73f-5a7ae6b3de60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.918947] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Created folder: Project (d765e0f1dd914baf8b3b5f82780d9f16) in parent group-v838542. [ 1153.919169] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Creating folder: Instances. Parent ref: group-v838822. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1153.919423] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17663711-833d-44a4-b409-73e0fec8d5d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.930493] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Created folder: Instances in parent group-v838822. [ 1153.930798] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.931033] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.931262] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed2f6f24-ab86-4ba1-8046-a4dae3346c71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.954586] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.954586] env[61545]: value = "task-4256611" [ 1153.954586] env[61545]: _type = "Task" [ 1153.954586] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.963569] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256611, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.085274] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700957} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.086642] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 3b4fd643-c536-4da9-b1a3-82cd74d24f3e/3b4fd643-c536-4da9-b1a3-82cd74d24f3e.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.087067] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.087425] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-439c7dc9-2cf7-4088-86ae-0c05c4acfb0d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.097133] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1154.097133] env[61545]: value = "task-4256612" [ 1154.097133] env[61545]: _type = "Task" [ 1154.097133] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.097133] env[61545]: DEBUG nova.network.neutron [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Updating instance_info_cache with network_info: [{"id": "7a524b94-5ca7-497a-8066-258b28fe2992", "address": "fa:16:3e:00:8b:4c", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a524b94-5c", "ovs_interfaceid": "7a524b94-5ca7-497a-8066-258b28fe2992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.108070] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': 
task-4256612, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.259203] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a7b064-c88f-40ff-b3b9-eb7f7a9e91e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.267726] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1580676-ed8a-4082-bb2c-93d0a766b337 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.306173] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209dadcf-e8bc-4c7d-981b-0b2f75101272 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.318792] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c0bec5-16dc-4d02-8183-f7b9c458bb97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.323294] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256607, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.351835] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1154.378930] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256608, 'name': ReconfigVM_Task, 'duration_secs': 0.967113} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.379815] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 413f3b55-0db1-4331-b19f-5cd6c4eeb48a/413f3b55-0db1-4331-b19f-5cd6c4eeb48a.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1154.385188] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-315fa2ac-96fc-4a46-8c70-ab5c9f916d3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.391117] env[61545]: INFO nova.compute.manager [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Took 31.08 seconds to build instance. [ 1154.397190] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1154.397190] env[61545]: value = "task-4256613" [ 1154.397190] env[61545]: _type = "Task" [ 1154.397190] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.415793] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256613, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.428776] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.465741] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256611, 'name': CreateVM_Task, 'duration_secs': 0.466004} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.465940] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1154.466722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.466891] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.467266] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1154.467645] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9de58f6-26c8-4808-a369-71f635f87edd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.473815] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1154.473815] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fe31bc-f5a7-f7fa-4b71-3d7936ac4b64" [ 1154.473815] env[61545]: _type = "Task" [ 1154.473815] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.483581] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fe31bc-f5a7-f7fa-4b71-3d7936ac4b64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.603155] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.603617] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Instance network_info: |[{"id": "7a524b94-5ca7-497a-8066-258b28fe2992", "address": "fa:16:3e:00:8b:4c", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a524b94-5c", "ovs_interfaceid": "7a524b94-5ca7-497a-8066-258b28fe2992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1154.604042] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Acquired lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.604275] env[61545]: DEBUG nova.network.neutron [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Refreshing network info cache for port 7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1154.605924] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:8b:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a524b94-5ca7-497a-8066-258b28fe2992', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1154.618562] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 
tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1154.620698] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1154.621074] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b1ebf93-7079-4ee8-8ecd-752e0607fb53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.643765] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140575} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.644421] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.645295] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430c0252-5c17-4c90-bc0f-5d39bfc395ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.651429] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1154.651429] env[61545]: value = "task-4256614" [ 1154.651429] env[61545]: _type = "Task" [ 1154.651429] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.670937] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 3b4fd643-c536-4da9-b1a3-82cd74d24f3e/3b4fd643-c536-4da9-b1a3-82cd74d24f3e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.676608] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2587f2c3-e2fd-4825-a215-98d134b76725 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.698571] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256614, 'name': CreateVM_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.700285] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1154.700285] env[61545]: value = "task-4256615" [ 1154.700285] env[61545]: _type = "Task" [ 1154.700285] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.709367] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256615, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.814162] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256607, 'name': CreateVM_Task, 'duration_secs': 1.207861} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.814162] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1154.814162] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.885409] env[61545]: ERROR nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [req-70fcb824-b047-4373-9df6-e371a209d44e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-70fcb824-b047-4373-9df6-e371a209d44e"}]} [ 1154.894196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-9bd11ab6-fd43-4313-8f30-757835d96f66 tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.594s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.909076] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256613, 'name': Rename_Task, 'duration_secs': 0.41499} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.909387] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1154.909945] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-556baed9-80f3-48c9-96f9-5eb804f9ca44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.919403] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1154.921968] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1154.921968] env[61545]: value = "task-4256616" [ 1154.921968] env[61545]: _type = "Task" [ 1154.921968] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.932468] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256616, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.936910] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1154.937177] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1154.955434] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1154.984154] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1154.994791] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fe31bc-f5a7-f7fa-4b71-3d7936ac4b64, 'name': SearchDatastore_Task, 'duration_secs': 0.017149} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.994902] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.995216] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.995488] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.995661] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.995885] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.996737] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.997328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1154.997717] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c045cbd-f744-40a7-81ba-d30f9fcb4632 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.001714] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d407c2b5-12a7-48a1-873e-f11e0a898b0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.012024] env[61545]: DEBUG oslo_vmware.api [None 
req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1155.012024] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a0d5-07ce-0ac9-8961-c8b038e33cdd" [ 1155.012024] env[61545]: _type = "Task" [ 1155.012024] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.016471] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1155.016705] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1155.020576] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18df5d45-da08-4165-998a-ac923de38482 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.025048] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a0d5-07ce-0ac9-8961-c8b038e33cdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.032313] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1155.032313] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526787b0-1e3b-9a3b-5624-fd4fc54a24f6" [ 1155.032313] env[61545]: _type = "Task" [ 1155.032313] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.042598] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526787b0-1e3b-9a3b-5624-fd4fc54a24f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.178241] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256614, 'name': CreateVM_Task, 'duration_secs': 0.392627} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.178777] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1155.179594] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.212720] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256615, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.221601] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3a5022-2ad1-46f8-b6b4-db52aff55839 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.230870] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612eac4d-3b01-4776-bf3d-1cf0c99b48b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.273813] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139a6842-5fcb-479d-ba4f-ca89bb528e07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.285060] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8925228c-698c-4556-bfba-4b73539d8482 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.301550] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1155.433558] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256616, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.519418] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a0d5-07ce-0ac9-8961-c8b038e33cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.021169} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.522188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.522188] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1155.522188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.522188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.522188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1155.522188] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54f4e01f-9f46-48f8-9996-83fa2dc759fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.526452] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1155.526452] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cc657a-9983-f5c2-1dec-a053387fd304" [ 1155.526452] env[61545]: _type = "Task" [ 1155.526452] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.536464] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cc657a-9983-f5c2-1dec-a053387fd304, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.547274] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526787b0-1e3b-9a3b-5624-fd4fc54a24f6, 'name': SearchDatastore_Task, 'duration_secs': 0.0154} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.548756] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc5da3dc-c12c-4288-9504-51b6bb98346b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.556031] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1155.556031] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6504c-3fba-4257-1d08-1915da76bcda" [ 1155.556031] env[61545]: _type = "Task" [ 1155.556031] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.565622] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6504c-3fba-4257-1d08-1915da76bcda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.594509] env[61545]: DEBUG nova.network.neutron [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Updated VIF entry in instance network info cache for port 7a524b94-5ca7-497a-8066-258b28fe2992. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1155.594976] env[61545]: DEBUG nova.network.neutron [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Updating instance_info_cache with network_info: [{"id": "7a524b94-5ca7-497a-8066-258b28fe2992", "address": "fa:16:3e:00:8b:4c", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a524b94-5c", "ovs_interfaceid": "7a524b94-5ca7-497a-8066-258b28fe2992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.711970] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256615, 'name': ReconfigVM_Task, 'duration_secs': 0.58355} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.712266] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 3b4fd643-c536-4da9-b1a3-82cd74d24f3e/3b4fd643-c536-4da9-b1a3-82cd74d24f3e.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.712986] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1488968-c5a2-4d3c-9a68-db3c41016d81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.720098] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1155.720098] env[61545]: value = "task-4256617" [ 1155.720098] env[61545]: _type = "Task" [ 1155.720098] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.730290] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256617, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.829967] env[61545]: ERROR nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [req-67490b8e-f588-4a4e-b834-84b420367a8d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-67490b8e-f588-4a4e-b834-84b420367a8d"}]} [ 1155.853288] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1155.877937] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1155.878855] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1155.898027] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1155.926021] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b 
tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1155.947781] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256616, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.042138] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cc657a-9983-f5c2-1dec-a053387fd304, 'name': SearchDatastore_Task, 'duration_secs': 0.021291} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.042138] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.042138] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1156.042336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.069903] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e6504c-3fba-4257-1d08-1915da76bcda, 'name': SearchDatastore_Task, 'duration_secs': 0.016292} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.073035] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.073283] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f2975097-29a3-46cc-9dea-0c414baff246/f2975097-29a3-46cc-9dea-0c414baff246.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1156.074855] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.074855] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.074855] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b9cf3d7-f003-406b-8f76-4698ef914fa7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.078741] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b86a55d-f35c-481e-ba66-44204f20902a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.087774] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1156.087774] env[61545]: value = "task-4256618" [ 1156.087774] env[61545]: _type = "Task" [ 1156.087774] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.098647] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.098647] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1156.100050] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cd8b08c-bc3e-4c1c-8cd5-5f8ad88875ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.103709] env[61545]: DEBUG oslo_concurrency.lockutils [req-0d1a1842-006a-4b5a-899d-8c439597b273 req-93c6a70e-5100-4210-8d50-8bd3eec5f3df service nova] Releasing lock "refresh_cache-a5ec3957-4646-4de4-8eac-9f0fbbf8da52" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.110569] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.115211] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1156.115211] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521cf11a-81b6-a3fb-8065-108392c25a68" [ 1156.115211] env[61545]: _type = "Task" [ 1156.115211] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.126239] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521cf11a-81b6-a3fb-8065-108392c25a68, 'name': SearchDatastore_Task, 'duration_secs': 0.013165} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.127369] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17477727-90cd-4f91-9626-65739c774cac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.138125] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1156.138125] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c905e9-8954-e8ed-d5e6-a894df4a408a" [ 1156.138125] env[61545]: _type = "Task" [ 1156.138125] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.153830] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c905e9-8954-e8ed-d5e6-a894df4a408a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.217870] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f84024e-eb54-483e-8361-08e604b36d44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.231036] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256617, 'name': Rename_Task, 'duration_secs': 0.198149} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.233267] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.233911] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-490257e4-ad0c-4eb6-9f5c-57d8f190491f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.236597] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79849b0c-8df9-4291-bb5a-7867d9c56f56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.274242] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecf24bc-0630-4f89-8fb4-52490c553bf9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.277131] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1156.277131] env[61545]: value = "task-4256619" [ 1156.277131] env[61545]: _type = "Task" [ 1156.277131] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.285252] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d420873a-bafa-4b93-b3e1-bc643c1fd997 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.293304] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256619, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.304335] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1156.440038] env[61545]: DEBUG oslo_vmware.api [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256616, 'name': PowerOnVM_Task, 'duration_secs': 1.057952} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.440497] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.440832] env[61545]: INFO nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Took 17.40 seconds to spawn the instance on the hypervisor. [ 1156.441129] env[61545]: DEBUG nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.442418] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01c2662-138c-4634-8e55-26641f847431 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.461236] env[61545]: DEBUG nova.compute.manager [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1156.604823] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256618, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.649722] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c905e9-8954-e8ed-d5e6-a894df4a408a, 'name': SearchDatastore_Task, 'duration_secs': 0.011602} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.650068] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.650349] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1156.650697] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.650932] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.651194] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce1800be-66ed-4e85-bd12-d969741e61e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.653744] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e637bf8-80f1-46f5-8661-7c5df3b8cb6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.662868] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1156.662868] env[61545]: value = "task-4256620" [ 1156.662868] env[61545]: _type = "Task" [ 1156.662868] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.668615] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.668935] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1156.673098] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25d4f7b1-24bf-4ce8-b146-c77c96fdfe80 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.676288] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.682748] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1156.682748] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f4f305-972f-fe0d-c1bc-727b2a17ca88" [ 1156.682748] env[61545]: _type = "Task" [ 1156.682748] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.694661] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f4f305-972f-fe0d-c1bc-727b2a17ca88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.795127] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256619, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.842547] env[61545]: DEBUG nova.scheduler.client.report [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1156.842957] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 142 to 143 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1156.843208] env[61545]: DEBUG nova.compute.provider_tree [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1156.976398] env[61545]: INFO nova.compute.manager [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Took 37.25 seconds to build instance. 
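The ERROR at 1155.829967 and the later "generation from 142 to 143" lines show Placement's optimistic concurrency at work: every inventory PUT asserts the resource-provider generation the client last saw, and if another writer bumped it first, Placement answers 409 placement.concurrent_update, so the report client refreshes inventories, aggregates and traits and retries. A rough client-side sketch of that loop against the Placement REST API (endpoint URL, microversion header, auth and retry limit are assumptions; this is not the nova.scheduler.client.report code):

# Illustrative generation-conflict retry loop; authentication omitted.
import requests

PLACEMENT = "http://placement.example/placement"        # assumed endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}    # assumed microversion

def set_inventory(rp_uuid, inventories, max_retries=3):
    """PUT the full inventory for a provider, retrying on generation conflicts."""
    url = "%s/resource_providers/%s/inventories" % (PLACEMENT, rp_uuid)
    for _ in range(max_retries):
        # Re-read the provider so the PUT asserts its current generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code == 200:
            # Placement bumps the generation on success (142 -> 143 above).
            return resp.json()["resource_provider_generation"]
        if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
            continue        # lost the race; refresh and try again
        resp.raise_for_status()
    raise RuntimeError("still conflicting after %d attempts" % max_retries)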
[ 1156.995869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.052839] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.053135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.102870] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597398} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.103037] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f2975097-29a3-46cc-9dea-0c414baff246/f2975097-29a3-46cc-9dea-0c414baff246.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1157.103261] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1157.103563] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a966ff0d-1b03-4e60-b899-55154fb20af3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.111956] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1157.111956] env[61545]: value = "task-4256621" [ 1157.111956] env[61545]: _type = "Task" [ 1157.111956] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.121458] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.174229] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256620, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.197781] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f4f305-972f-fe0d-c1bc-727b2a17ca88, 'name': SearchDatastore_Task, 'duration_secs': 0.01918} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.199658] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dd17b88-5bb9-4756-926e-d2886c5b36aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.207266] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1157.207266] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ad76fd-f9a5-b7f5-703a-114ab7c3c47f" [ 1157.207266] env[61545]: _type = "Task" [ 1157.207266] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.218440] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ad76fd-f9a5-b7f5-703a-114ab7c3c47f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.291173] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256619, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.349734] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.337s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.350545] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.354270] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.926s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.395705] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "4f713be2-4c38-413b-874d-a39a4c01a1be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.396023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.396244] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.396431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.396595] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.400395] env[61545]: INFO nova.compute.manager [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Terminating instance [ 1157.479081] env[61545]: DEBUG oslo_concurrency.lockutils [None req-03e44b77-8d98-4b94-a2fa-29077bd2efab tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.763s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.556712] env[61545]: DEBUG nova.compute.utils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.622288] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128329} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.622288] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1157.623096] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8b3b21-6b17-49fb-982b-407928bc2ae4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.376720] env[61545]: DEBUG nova.compute.utils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1158.380290] env[61545]: INFO nova.compute.claims [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1158.383835] env[61545]: DEBUG nova.compute.manager [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1158.384054] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1158.384398] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.384609] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.384840] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.385156] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.386285] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.387680] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.334s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.398931] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 
f2975097-29a3-46cc-9dea-0c414baff246/f2975097-29a3-46cc-9dea-0c414baff246.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1158.400315] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.403019] env[61545]: INFO nova.compute.manager [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Terminating instance [ 1158.412942] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1158.413134] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1158.415207] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e676ee-bd18-4e40-92eb-24ff18df9d8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.417681] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67a0eda6-44aa-46c7-a373-7e5fb524c836 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.445709] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805719} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.454764] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1158.455213] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1158.455822] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ad76fd-f9a5-b7f5-703a-114ab7c3c47f, 'name': SearchDatastore_Task, 'duration_secs': 0.051906} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.456081] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.456318] env[61545]: DEBUG oslo_vmware.api [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256619, 'name': PowerOnVM_Task, 'duration_secs': 1.256238} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.457466] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b0c0c4b-635b-451d-a4e0-afb3d7593d8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.459436] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.459732] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a5ec3957-4646-4de4-8eac-9f0fbbf8da52/a5ec3957-4646-4de4-8eac-9f0fbbf8da52.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.459998] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e147f2ee-6329-46b1-8c4c-4079f63a1d2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.461448] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.461655] env[61545]: INFO nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Took 12.37 seconds to spawn the instance on the hypervisor. [ 1158.461832] env[61545]: DEBUG nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.462192] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1158.462192] env[61545]: value = "task-4256622" [ 1158.462192] env[61545]: _type = "Task" [ 1158.462192] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.462666] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79592829-bc08-4163-afcf-649705d4b171 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.465299] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afbce9e-2938-4cab-988f-68603f5dc599 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.473558] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1158.473558] env[61545]: value = "task-4256623" [ 1158.473558] env[61545]: _type = "Task" [ 1158.473558] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.474969] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1158.474969] env[61545]: value = "task-4256624" [ 1158.474969] env[61545]: _type = "Task" [ 1158.474969] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.487274] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1158.487274] env[61545]: value = "task-4256625" [ 1158.487274] env[61545]: _type = "Task" [ 1158.487274] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.487562] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.489452] env[61545]: DEBUG nova.policy [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ad96bd4fd944165b4917c4dacaea04c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50b8a600a38442278d0cf036919f87c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.501958] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256623, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.506927] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256624, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.510113] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.908534] env[61545]: INFO nova.compute.resource_tracker [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating resource usage from migration f39d11e4-901d-40a5-afc4-7e49c7dc41aa [ 1158.943585] env[61545]: DEBUG nova.compute.manager [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1158.943891] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1158.948271] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1b6751-8c1a-4b43-a0f6-51de9ad04cd1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.961712] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.961712] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2934815f-56f5-4e7e-874e-cacf2b80baa3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.976799] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1158.976799] env[61545]: value = "task-4256626" [ 1158.976799] env[61545]: _type = "Task" [ 1158.976799] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.988384] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256622, 'name': ReconfigVM_Task, 'duration_secs': 0.405246} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.998026] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Reconfigured VM instance instance-00000063 to attach disk [datastore2] f2975097-29a3-46cc-9dea-0c414baff246/f2975097-29a3-46cc-9dea-0c414baff246.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.001702] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Successfully created port: e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1159.009198] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81615222-108c-410a-a64e-be176288cc89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.011289] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115307} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.011830] env[61545]: INFO nova.compute.manager [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Took 34.87 seconds to build instance. [ 1159.020950] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.021334] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.022812] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6415a641-d4f5-4f81-86d6-fa892a5e84be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.030403] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256624, 'name': PowerOffVM_Task, 'duration_secs': 0.277706} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.036060] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.036325] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.037034] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1159.037034] env[61545]: value = "task-4256627" [ 1159.037034] env[61545]: _type = "Task" [ 1159.037034] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.041819] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7f1a11b-8e05-46da-9625-df7c1f0657b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.044429] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256625, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.068224] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.073181] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab7a932b-2ba0-461b-a31f-ff3be2107589 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.093888] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256627, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.095465] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1159.095465] env[61545]: value = "task-4256629" [ 1159.095465] env[61545]: _type = "Task" [ 1159.095465] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.105413] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.138939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.139245] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.139480] env[61545]: INFO nova.compute.manager [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Attaching volume 2c71412f-9045-47cc-8b44-501e69c75aec to /dev/sdb [ 1159.143590] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.143907] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.144060] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Deleting the datastore file [datastore2] 4f713be2-4c38-413b-874d-a39a4c01a1be {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.144393] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34efc559-f053-4f5f-9c7f-b64fa6e7e8b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.158402] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for the task: (returnval){ [ 1159.158402] env[61545]: value = "task-4256630" [ 1159.158402] env[61545]: _type = "Task" [ 1159.158402] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.167603] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.193444] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc53fe1-fe66-4ad4-9e49-b4b47bd56520 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.204458] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9746ef34-3051-49ea-98f5-aa3bc3d2f77e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.220798] env[61545]: DEBUG nova.virt.block_device [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating existing volume attachment record: 742fbc80-d8cf-4cf4-ab87-223327156e02 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1159.272070] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b777278-069c-41bd-89e1-dc861c964b6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.281766] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e32db52-15d0-46c2-82c3-b661022ec5ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.313962] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbc8670-4f88-4430-95e9-2db908d331f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.324995] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45965afe-87bd-4f61-80ec-5b71832db189 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.339571] env[61545]: DEBUG nova.compute.provider_tree [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.449789] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.483529] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.483841] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.484022] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.484271] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.484447] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.484598] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.484808] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.484968] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.485148] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.485363] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.485548] env[61545]: DEBUG nova.virt.hardware [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.486848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a652637-d003-484d-91d8-54f053dc8a18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.503921] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256626, 'name': PowerOffVM_Task, 'duration_secs': 0.264234} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.505246] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bc1acb-a610-472f-bbca-47c3082a497a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.510618] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.510618] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.510793] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ed3007f-3b98-4610-8f54-381086fa894a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.522473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e73b142c-ce95-48cb-a3c4-30cfe0177e19 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.391s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.522831] env[61545]: DEBUG oslo_vmware.api [None 
req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576003} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.532465] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a5ec3957-4646-4de4-8eac-9f0fbbf8da52/a5ec3957-4646-4de4-8eac-9f0fbbf8da52.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.532465] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.532465] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a79e17b-857b-4b7c-8d70-a70b5d806688 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.540666] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1159.540666] env[61545]: value = "task-4256633" [ 1159.540666] env[61545]: _type = "Task" [ 1159.540666] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.560029] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256633, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.561931] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256627, 'name': Rename_Task, 'duration_secs': 0.178753} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.562279] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1159.562554] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-454590a7-b941-4274-9814-92dffcad269c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.570635] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1159.570635] env[61545]: value = "task-4256634" [ 1159.570635] env[61545]: _type = "Task" [ 1159.570635] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.580686] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256634, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.605778] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256629, 'name': ReconfigVM_Task, 'duration_secs': 0.304559} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.609027] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.609027] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5d76aca-3bb8-4222-8858-f468e379cdfc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.616450] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1159.616450] env[61545]: value = "task-4256635" [ 1159.616450] env[61545]: _type = "Task" [ 1159.616450] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.628795] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256635, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.669216] env[61545]: DEBUG oslo_vmware.api [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Task: {'id': task-4256630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167309} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.669791] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1159.670136] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1159.670536] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1159.670931] env[61545]: INFO nova.compute.manager [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1159.671457] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.671906] env[61545]: DEBUG nova.compute.manager [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1159.672203] env[61545]: DEBUG nova.network.neutron [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1159.682024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.682024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.682024] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleting the datastore file [datastore2] 413f3b55-0db1-4331-b19f-5cd6c4eeb48a {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.682024] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f1616f2-a068-4bb1-813f-b4112d3663fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.690575] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1159.690575] env[61545]: value = "task-4256636" [ 1159.690575] env[61545]: _type = "Task" [ 1159.690575] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.702149] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.843061] env[61545]: DEBUG nova.scheduler.client.report [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.018358] env[61545]: DEBUG nova.compute.manager [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-changed-c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1160.018358] env[61545]: DEBUG nova.compute.manager [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing instance network info cache due to event network-changed-c279f08f-d443-4a8b-bd37-296ed181c6a7. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1160.018358] env[61545]: DEBUG oslo_concurrency.lockutils [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.018554] env[61545]: DEBUG oslo_concurrency.lockutils [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.018641] env[61545]: DEBUG nova.network.neutron [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing network info cache for port c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1160.052909] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078354} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.053395] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1160.054539] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbf8291-390d-44bf-9d91-7cf48f3ef171 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.086259] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] a5ec3957-4646-4de4-8eac-9f0fbbf8da52/a5ec3957-4646-4de4-8eac-9f0fbbf8da52.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.095176] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41e43e72-a431-4447-abda-9c1b4e07e83d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.110687] env[61545]: DEBUG nova.network.neutron [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.124032] env[61545]: DEBUG oslo_vmware.api [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256634, 'name': PowerOnVM_Task, 'duration_secs': 0.521537} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.133618] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1160.134253] env[61545]: INFO nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Took 10.70 seconds to spawn the instance on the hypervisor. [ 1160.134253] env[61545]: DEBUG nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1160.134380] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1160.134380] env[61545]: value = "task-4256637" [ 1160.134380] env[61545]: _type = "Task" [ 1160.134380] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.135633] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7dd9927-5b7c-49e6-acb7-a0590bee85e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.145095] env[61545]: DEBUG nova.compute.manager [req-ff9b7cac-649e-424b-a456-684d01de4fb9 req-d2f13562-27ee-42fd-89fd-7efb8e2c57f8 service nova] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Received event network-vif-deleted-042733f1-0011-46e6-b88b-bc359a44bca2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1160.149786] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256635, 'name': Rename_Task, 'duration_secs': 0.205917} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.150519] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1160.150854] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c1abf35-5555-4251-affb-635a03273f9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.157879] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256637, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.167036] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1160.167036] env[61545]: value = "task-4256638" [ 1160.167036] env[61545]: _type = "Task" [ 1160.167036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.177710] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.203337] env[61545]: DEBUG oslo_vmware.api [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293183} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.203616] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.203799] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.204139] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.204274] env[61545]: INFO nova.compute.manager [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1160.204526] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.205091] env[61545]: DEBUG nova.compute.manager [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1160.205091] env[61545]: DEBUG nova.network.neutron [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1160.348430] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.994s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.348643] env[61545]: INFO nova.compute.manager [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Migrating [ 1160.355819] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.360s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.614298] env[61545]: INFO nova.compute.manager [-] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] 
Took 0.94 seconds to deallocate network for instance. [ 1160.651985] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256637, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.675235] env[61545]: INFO nova.compute.manager [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Took 31.39 seconds to build instance. [ 1160.685666] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256638, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.868484] env[61545]: INFO nova.compute.claims [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1160.873351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.873605] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.873605] env[61545]: DEBUG nova.network.neutron [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.099023] env[61545]: DEBUG nova.network.neutron [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updated VIF entry in instance network info cache for port c279f08f-d443-4a8b-bd37-296ed181c6a7. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1161.099023] env[61545]: DEBUG nova.network.neutron [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.124384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.149971] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256637, 'name': ReconfigVM_Task, 'duration_secs': 0.584359} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.150907] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Reconfigured VM instance instance-00000064 to attach disk [datastore2] a5ec3957-4646-4de4-8eac-9f0fbbf8da52/a5ec3957-4646-4de4-8eac-9f0fbbf8da52.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.151582] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1e11dab-6534-46bc-8564-be00406155ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.158530] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1161.158530] env[61545]: value = "task-4256639" [ 1161.158530] env[61545]: _type = "Task" [ 1161.158530] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.166855] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256639, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.176252] env[61545]: DEBUG oslo_vmware.api [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256638, 'name': PowerOnVM_Task, 'duration_secs': 0.76311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.176870] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.178063] env[61545]: INFO nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Took 8.02 seconds to spawn the instance on the hypervisor. 
[ 1161.178299] env[61545]: DEBUG nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.178756] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c421d6a3-3111-4833-b9bd-2eac592a4823 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.901s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.179587] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90e0f06-7aae-4e9c-9e72-8e89df68d8e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.375585] env[61545]: INFO nova.compute.resource_tracker [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating resource usage from migration 3a035504-5737-4891-8650-0538e13b5dea [ 1161.600757] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84533245-0166-4676-ae95-cac79f2c179d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.603700] env[61545]: DEBUG oslo_concurrency.lockutils [req-59e09075-b226-4a6e-b2c4-258221e88b47 req-c2d50c57-eb0e-4642-8c72-f657be94e95c service nova] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.611510] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ca54ce-9d1b-4b77-a420-e3561e1fbb42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.676567] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b7bf00-08a9-47d3-a32b-31cbc07139a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.685010] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256639, 'name': Rename_Task, 'duration_secs': 0.374425} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.689979] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Successfully updated port: e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1161.689979] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.689979] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa10fa87-1ae9-4436-a7f6-37b5385ecf33 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.692184] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c775a7-d025-4fad-9079-7f754ae12569 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.710892] env[61545]: DEBUG nova.compute.provider_tree [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.713215] env[61545]: INFO nova.compute.manager [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Took 27.50 seconds to build instance. [ 1161.718045] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1161.718045] env[61545]: value = "task-4256641" [ 1161.718045] env[61545]: _type = "Task" [ 1161.718045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.725749] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256641, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.079041] env[61545]: DEBUG nova.network.neutron [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.190605] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.190764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.190963] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.217194] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4c33d4b2-53c5-4708-945f-0299acdd35c6 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.017s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.218547] env[61545]: DEBUG nova.scheduler.client.report [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1162.237027] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256641, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.264934] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-deleted-9dcaf9ff-4661-4e01-8e12-31e081cb7c9a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1162.265155] env[61545]: INFO nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Neutron deleted interface 9dcaf9ff-4661-4e01-8e12-31e081cb7c9a; detaching it from the instance and deleting it from the info cache [ 1162.265488] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [{"id": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "address": "fa:16:3e:0a:36:b9", "network": {"id": "31158699-c741-42f2-bebe-3b7916e59177", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315829624", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbeccc2f-cf", "ovs_interfaceid": "cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "address": "fa:16:3e:79:cb:0d", "network": {"id": "21ac6a94-51de-4b05-9841-f6bed1d33849", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666320482", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98be7dc1-d5", "ovs_interfaceid": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.569949] env[61545]: DEBUG nova.network.neutron [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.581759] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.625282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "f2975097-29a3-46cc-9dea-0c414baff246" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.625576] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.625801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "f2975097-29a3-46cc-9dea-0c414baff246-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.625992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.626208] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.628851] env[61545]: INFO nova.compute.manager [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Terminating instance [ 1162.729027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.372s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.729027] env[61545]: INFO nova.compute.manager [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Migrating [ 1162.740051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.615s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.740268] env[61545]: DEBUG nova.objects.instance [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lazy-loading 'resources' on Instance uuid 4f713be2-4c38-413b-874d-a39a4c01a1be {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.746089] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1162.761218] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256641, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.768576] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dff7a7f-b4b9-4a4d-b639-d4bca4f252ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.788626] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a73728a-ef71-400c-a6d1-1f91bfc01495 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.833611] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Detach interface failed, port_id=9dcaf9ff-4661-4e01-8e12-31e081cb7c9a, reason: Instance 413f3b55-0db1-4331-b19f-5cd6c4eeb48a could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1162.834063] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1162.834476] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.834909] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.835242] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.835537] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] No waiting events found dispatching network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1162.835915] env[61545]: WARNING nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received unexpected event network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 for instance with vm_state building and task_state spawning. 
[ 1162.836233] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-deleted-cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1162.836528] env[61545]: INFO nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Neutron deleted interface cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91; detaching it from the instance and deleting it from the info cache [ 1162.838501] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [{"id": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "address": "fa:16:3e:79:cb:0d", "network": {"id": "21ac6a94-51de-4b05-9841-f6bed1d33849", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666320482", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98be7dc1-d5", "ovs_interfaceid": "98be7dc1-d53f-476a-8f23-de85f656f6b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.038262] env[61545]: DEBUG nova.network.neutron [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.076068] env[61545]: INFO nova.compute.manager [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Rebuilding instance [ 1163.079371] env[61545]: INFO nova.compute.manager [-] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Took 2.87 seconds to deallocate network for instance. [ 1163.134132] env[61545]: DEBUG nova.compute.manager [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1163.134393] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.135290] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a52acf0-f827-4ab3-8c37-cc5378e39d50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.143978] env[61545]: DEBUG nova.compute.manager [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1163.145036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69129f5-1a61-49c7-ae9b-bfa8a1116336 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.150498] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.151121] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a02a5b7-cb68-43e1-b009-041d8191a24f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.157956] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1163.157956] env[61545]: value = "task-4256642" [ 1163.157956] env[61545]: _type = "Task" [ 1163.157956] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.167744] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.234727] env[61545]: DEBUG oslo_vmware.api [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256641, 'name': PowerOnVM_Task, 'duration_secs': 1.085243} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.234844] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1163.235083] env[61545]: INFO nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Took 12.48 seconds to spawn the instance on the hypervisor. [ 1163.235281] env[61545]: DEBUG nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1163.236237] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c10fc5d-d3a2-4274-9973-eb6bcfee67d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.258708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.258708] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.258708] env[61545]: DEBUG nova.network.neutron [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.343024] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59ce6cc0-039b-4183-9217-98f5b8fbfe86 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.354353] env[61545]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f9d4f8-663b-4a2c-960a-38477b46a0c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.393125] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Detach interface failed, port_id=cbeccc2f-cfc6-4e1e-8f7b-d7883e01be91, reason: Instance 413f3b55-0db1-4331-b19f-5cd6c4eeb48a could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1163.393539] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1163.393773] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing instance network info cache due to event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1163.394073] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.507490] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14943d01-6659-4d7a-951f-8a51031a5b79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.516844] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e6a179-cc6f-41bc-8ab6-39a49d45c12d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.551435] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.551814] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance network_info: |[{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1163.552794] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.552987] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.554228] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:70:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e06d5cf8-450e-488f-8ba6-9e7d62811ba1', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1163.561701] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1163.562995] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01542dda-b212-4e41-b003-bd088e2e2126 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.568391] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1163.569306] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bf04045-5502-4d5d-bddb-d38f6c405ab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.591955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.597209] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d649bb-a84e-47b7-87cc-42a41eb79a75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.601489] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1163.601489] env[61545]: value = "task-4256643" [ 1163.601489] env[61545]: _type = "Task" [ 1163.601489] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.616197] env[61545]: DEBUG nova.compute.provider_tree [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.627433] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256643, 'name': CreateVM_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.674348] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256642, 'name': PowerOffVM_Task, 'duration_secs': 0.352962} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.675268] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.675446] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.675706] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6b84e88-c9c5-4c1e-ae12-7f105a2e7c52 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.756450] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.757216] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.757560] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Deleting the datastore file [datastore2] f2975097-29a3-46cc-9dea-0c414baff246 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.763972] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c11c0128-27ea-4dd5-9d1f-f8a6a9f9c52d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.770281] env[61545]: INFO nova.compute.manager [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Took 33.68 seconds to build instance. [ 1163.778233] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for the task: (returnval){ [ 1163.778233] env[61545]: value = "task-4256645" [ 1163.778233] env[61545]: _type = "Task" [ 1163.778233] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.788019] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1163.788019] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838826', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'name': 'volume-2c71412f-9045-47cc-8b44-501e69c75aec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c61ca5f4-78ae-4626-977d-8c17dc12c012', 'attached_at': '', 'detached_at': '', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'serial': '2c71412f-9045-47cc-8b44-501e69c75aec'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1163.788019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b54a2a6-8b45-44fd-9d88-d905acf413e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.798339] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.818096] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3788ee1-6487-4ece-90c4-725c144ae880 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.855471] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-2c71412f-9045-47cc-8b44-501e69c75aec/volume-2c71412f-9045-47cc-8b44-501e69c75aec.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.856537] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01ac4336-6dab-4749-b7ef-3a1474efcd9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.881141] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1163.881141] env[61545]: value = "task-4256646" [ 1163.881141] env[61545]: _type = "Task" [ 1163.881141] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.892496] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256646, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.985488] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updated VIF entry in instance network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.985881] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.112348] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a92187c-0138-46bb-9129-0891bd95bede {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.125088] env[61545]: DEBUG nova.scheduler.client.report [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.128653] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256643, 'name': CreateVM_Task, 'duration_secs': 0.405879} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.147588] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1164.148959] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1164.153570] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.415s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.156274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.156461] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.156956] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1164.157500] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.566s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.157805] env[61545]: DEBUG nova.objects.instance [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lazy-loading 'resources' on Instance uuid 413f3b55-0db1-4331-b19f-5cd6c4eeb48a {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.160413] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9390a4f-ba3d-4bae-aa0a-4ec8ca8056c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.169393] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.170424] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1164.170424] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d04b6-b231-64b8-1c4b-f382c3ca9189" [ 1164.170424] env[61545]: _type = "Task" [ 1164.170424] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.171279] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd8e058f-d5c7-4919-a299-0f40c236c04f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.188854] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d04b6-b231-64b8-1c4b-f382c3ca9189, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.192343] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1164.192343] env[61545]: value = "task-4256647" [ 1164.192343] env[61545]: _type = "Task" [ 1164.192343] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.198774] env[61545]: INFO nova.scheduler.client.report [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Deleted allocations for instance 4f713be2-4c38-413b-874d-a39a4c01a1be [ 1164.210979] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.228456] env[61545]: DEBUG nova.network.neutron [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.272433] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7dab3236-4f9b-449f-b218-9bcbb2cc2428 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.191s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.292050] env[61545]: DEBUG oslo_vmware.api [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Task: {'id': task-4256645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254854} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.292050] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.292050] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.292300] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.292526] env[61545]: INFO nova.compute.manager [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1164.292832] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.293118] env[61545]: DEBUG nova.compute.manager [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1164.293272] env[61545]: DEBUG nova.network.neutron [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1164.392431] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256646, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.488660] env[61545]: DEBUG oslo_concurrency.lockutils [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.489086] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Received event network-vif-deleted-98be7dc1-d53f-476a-8f23-de85f656f6b2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1164.489369] env[61545]: INFO nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Neutron deleted interface 98be7dc1-d53f-476a-8f23-de85f656f6b2; detaching it from the instance and deleting it from the info cache [ 1164.489651] env[61545]: DEBUG nova.network.neutron [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.662089] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.667074] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbb6c58c-68c5-4fd5-90f9-b57c6c30ba46 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.681128] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1164.681128] env[61545]: value = "task-4256648" [ 1164.681128] env[61545]: _type = "Task" [ 1164.681128] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.695191] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d04b6-b231-64b8-1c4b-f382c3ca9189, 'name': SearchDatastore_Task, 'duration_secs': 0.020074} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.697262] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.697755] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1164.698087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.698298] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.698623] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1164.708399] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-577a16c2-de59-4191-b47f-b4c0a0919c38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.711126] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256648, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.720125] env[61545]: DEBUG nova.compute.manager [req-8a227078-332f-4599-a864-a6066b75ea82 req-b58d0042-04d5-4b47-961b-844ce4c0ab80 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Received event network-vif-deleted-750ea46d-7298-43ac-a56d-08093fc56a42 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1164.720408] env[61545]: INFO nova.compute.manager [req-8a227078-332f-4599-a864-a6066b75ea82 req-b58d0042-04d5-4b47-961b-844ce4c0ab80 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Neutron deleted interface 750ea46d-7298-43ac-a56d-08093fc56a42; detaching it from the instance and deleting it from the info cache [ 1164.720677] env[61545]: DEBUG nova.network.neutron [req-8a227078-332f-4599-a864-a6066b75ea82 req-b58d0042-04d5-4b47-961b-844ce4c0ab80 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.727864] env[61545]: DEBUG oslo_concurrency.lockutils [None req-08d7dd44-0992-4334-9dbc-1bd389dadc0b tempest-ServerTagsTestJSON-1274712918 tempest-ServerTagsTestJSON-1274712918-project-member] Lock "4f713be2-4c38-413b-874d-a39a4c01a1be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.332s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.730884] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256647, 'name': PowerOffVM_Task, 'duration_secs': 0.247757} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.731581] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.734071] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.734578] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.736114] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490fb3fa-32d9-4372-8a2d-2fdbd8d20ef3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.740808] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1164.741089] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1164.742408] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c5adc7-ce5d-4677-9907-fca3007591fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.753081] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.754109] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a555ca46-2c08-4dab-a127-00d848279fca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.758616] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1164.758616] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5216ac73-d32d-f712-1647-f247ff76a735" [ 1164.758616] env[61545]: _type = "Task" [ 1164.758616] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.775296] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5216ac73-d32d-f712-1647-f247ff76a735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.792690] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1164.793065] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1164.793364] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Deleting the datastore file [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1164.793727] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47fab432-7cd4-4d3d-baa6-30cbed2f7e0e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.803076] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1164.803076] env[61545]: value = "task-4256650" [ 1164.803076] env[61545]: _type = "Task" [ 1164.803076] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.818574] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.897357] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256646, 'name': ReconfigVM_Task, 'duration_secs': 1.013181} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.897862] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-2c71412f-9045-47cc-8b44-501e69c75aec/volume-2c71412f-9045-47cc-8b44-501e69c75aec.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.903072] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcd84079-0486-433a-9562-adb88e468730 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.920971] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1164.920971] env[61545]: value = "task-4256651" [ 1164.920971] env[61545]: _type = "Task" [ 1164.920971] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.933466] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256651, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.962987] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0d2dab-a290-4b05-8573-719e826c937c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.972127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eec2fd8-cd38-4ebf-8a3c-568f056b0e38 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.981965] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21763c96-5a50-4398-8831-3d0a9f447cd5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.013291] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-021165fb-0b2e-4506-9aa2-7c57915b4bc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.016368] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4506c3-3a6c-4c23-acaf-3c0f64f4f143 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.020407] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Suspending the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1165.021317] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with 
opID=oslo.vmware-cae1d74b-159e-4760-b718-e40d4462a7cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.027296] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32b849f-0f33-4f0b-b09e-2499781cd38a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.036058] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3a6301-3deb-4607-b98e-947887b5890d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.047617] env[61545]: DEBUG oslo_vmware.api [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1165.047617] env[61545]: value = "task-4256652" [ 1165.047617] env[61545]: _type = "Task" [ 1165.047617] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.059865] env[61545]: DEBUG nova.compute.provider_tree [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.073582] env[61545]: DEBUG nova.compute.manager [req-37120f1d-4baf-4f8b-a0d1-2afa65fcc6e2 req-8282991a-edd7-40c1-aec5-29c11af008ec service nova] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Detach interface failed, port_id=98be7dc1-d53f-476a-8f23-de85f656f6b2, reason: Instance 413f3b55-0db1-4331-b19f-5cd6c4eeb48a could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1165.075468] env[61545]: DEBUG nova.scheduler.client.report [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.082772] env[61545]: DEBUG oslo_vmware.api [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256652, 'name': SuspendVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.193141] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256648, 'name': PowerOffVM_Task, 'duration_secs': 0.255208} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.193453] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.193639] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1165.197713] env[61545]: DEBUG nova.network.neutron [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.232767] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9306d91-d290-4f34-8713-e4684803e346 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.249775] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc340469-349f-486c-8317-d6401618c976 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.269820] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5216ac73-d32d-f712-1647-f247ff76a735, 'name': SearchDatastore_Task, 'duration_secs': 0.014129} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.282313] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf332ce8-1619-4aad-a132-c163297aafad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.285467] env[61545]: DEBUG nova.compute.manager [req-8a227078-332f-4599-a864-a6066b75ea82 req-b58d0042-04d5-4b47-961b-844ce4c0ab80 service nova] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Detach interface failed, port_id=750ea46d-7298-43ac-a56d-08093fc56a42, reason: Instance f2975097-29a3-46cc-9dea-0c414baff246 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1165.289949] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1165.289949] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3c7c8-84c2-2263-5266-42e03523d81f" [ 1165.289949] env[61545]: _type = "Task" [ 1165.289949] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.302211] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3c7c8-84c2-2263-5266-42e03523d81f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.317831] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185894} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.317831] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1165.318128] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1165.318128] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1165.436807] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.559063] env[61545]: DEBUG oslo_vmware.api [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256652, 'name': SuspendVM_Task} progress is 50%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.584657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.427s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.613544] env[61545]: INFO nova.scheduler.client.report [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleted allocations for instance 413f3b55-0db1-4331-b19f-5cd6c4eeb48a [ 1165.702086] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1165.703451] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.703898] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.704190] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.704396] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.704593] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1165.704859] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1165.705089] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1165.705317] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1165.705588] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1165.705769] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1165.714855] env[61545]: INFO nova.compute.manager [-] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Took 1.42 seconds to deallocate network for instance. [ 1165.715644] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21aca6b0-ab43-41c8-b0d0-685f1cc3fe83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.740136] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1165.740136] env[61545]: value = "task-4256653" [ 1165.740136] env[61545]: _type = "Task" [ 1165.740136] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.755861] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256653, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.801569] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3c7c8-84c2-2263-5266-42e03523d81f, 'name': SearchDatastore_Task, 'duration_secs': 0.02505} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.802085] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.802202] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1165.802426] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5682488-bbc7-4271-9497-768d35b61160 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.810697] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1165.810697] env[61545]: value = "task-4256654" [ 1165.810697] env[61545]: _type = "Task" [ 1165.810697] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.821711] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.935193] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256651, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.059853] env[61545]: DEBUG oslo_vmware.api [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256652, 'name': SuspendVM_Task, 'duration_secs': 0.965541} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.059853] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Suspended the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1166.059853] env[61545]: DEBUG nova.compute.manager [None req-8c5f4500-48a2-4239-8cf3-5d58aa0da1c1 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.060257] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a16680-8e66-4beb-8a01-2722f3644584 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.124423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a05eb6d5-912b-42e3-ad74-02d8851ff431 tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "413f3b55-0db1-4331-b19f-5cd6c4eeb48a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.740s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.242672] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.243060] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.243344] env[61545]: DEBUG nova.objects.instance [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lazy-loading 'resources' on Instance uuid f2975097-29a3-46cc-9dea-0c414baff246 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.253548] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bf3026-6729-4f18-af68-108bfafc9050 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.260850] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256653, 'name': ReconfigVM_Task, 'duration_secs': 0.286307} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.261672] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.279807] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.333974] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256654, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.367610] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1166.368147] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.368441] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.368719] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.368973] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.369313] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1166.369736] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1166.370090] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1166.370413] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1166.371342] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1166.371698] env[61545]: DEBUG nova.virt.hardware [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1166.373737] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de0eb32-e942-456f-8ce3-8e7a0519ab18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.387385] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acaf9d8-2b58-434d-8cd5-2f28d07535bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.412456] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1166.417783] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.418157] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1166.418392] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01cc5b4c-b4a6-4d35-89cf-749d7265eb5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.442243] env[61545]: DEBUG oslo_vmware.api [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256651, 'name': ReconfigVM_Task, 'duration_secs': 1.093078} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.444613] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838826', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'name': 'volume-2c71412f-9045-47cc-8b44-501e69c75aec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c61ca5f4-78ae-4626-977d-8c17dc12c012', 'attached_at': '', 'detached_at': '', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'serial': '2c71412f-9045-47cc-8b44-501e69c75aec'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1166.446234] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1166.446234] env[61545]: value = "task-4256655" [ 1166.446234] env[61545]: _type = "Task" [ 1166.446234] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.456471] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256655, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.786926] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1166.787566] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.787566] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.787867] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.788516] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.788516] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1166.788689] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1166.788822] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1166.789029] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1166.789241] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1166.789683] env[61545]: DEBUG nova.virt.hardware [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1166.797034] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1166.799491] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1166.799823] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f14df3d-0e03-4d0a-ade6-4ab79e352149 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.823910] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92c3bb1b-4ba5-463e-a62f-cc92fcb4c715 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.839188] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935976} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.842859] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1166.843999] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1166.843999] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1166.843999] env[61545]: value = "task-4256656" [ 1166.843999] env[61545]: _type = "Task" [ 1166.843999] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.843999] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1166.843999] env[61545]: value = "task-4256657" [ 1166.843999] env[61545]: _type = "Task" [ 1166.843999] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.843999] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2eb68a0-017c-4528-be11-9f1b2c1e6162 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.867308] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256656, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.874158] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256657, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.874329] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1166.874329] env[61545]: value = "task-4256658" [ 1166.874329] env[61545]: _type = "Task" [ 1166.874329] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.889284] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.964356] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256655, 'name': CreateVM_Task, 'duration_secs': 0.495297} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.964356] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1166.964562] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.964620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.964934] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1166.965230] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9238afb-2246-4fc0-9036-ce28ad4c3694 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.974160] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1166.974160] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522698ed-225a-4304-ea9b-3d560ae4ddef" [ 1166.974160] env[61545]: _type = "Task" [ 1166.974160] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.984158] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522698ed-225a-4304-ea9b-3d560ae4ddef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.057191] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f224d436-35e8-4f08-ad14-161d2b2ee03a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.066448] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22adb57-72f3-4961-9758-5a2f886df530 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.104927] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49edcb9-45d8-4276-bbb7-d8905bd9c644 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.118620] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03ec138-013d-4a90-ac79-0a23e07a798f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.136213] env[61545]: DEBUG nova.compute.provider_tree [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.363938] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256657, 'name': PowerOffVM_Task, 'duration_secs': 0.277958} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.368147] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.371155] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1167.372680] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256656, 'name': ReconfigVM_Task, 'duration_secs': 0.365201} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.373682] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1167.375699] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b9d0d6-4b4d-477a-8f0d-def08ab6c191 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.416502] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.421539] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2ecfaa1-d190-416b-afe3-87040eb725af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.437653] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101717} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.438973] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1167.441350] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad02219f-af8f-4ec8-87e9-7dc4f4bcceb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.446514] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1167.446514] env[61545]: value = "task-4256659" [ 1167.446514] env[61545]: _type = "Task" [ 1167.446514] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.465654] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.470264] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c4d2b3f-f08c-44e8-8bd9-9fef5e84dc27 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.493110] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256659, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.494828] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1167.494828] env[61545]: value = "task-4256660" [ 1167.494828] env[61545]: _type = "Task" [ 1167.494828] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.502823] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522698ed-225a-4304-ea9b-3d560ae4ddef, 'name': SearchDatastore_Task, 'duration_secs': 0.015834} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.503766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.504134] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1167.504527] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.504795] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.505134] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1167.510776] env[61545]: DEBUG nova.objects.instance [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid c61ca5f4-78ae-4626-977d-8c17dc12c012 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1167.512276] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deae6fe2-bbef-42af-84b5-712e7ad6f3dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.514826] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256660, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.525953] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1167.526210] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1167.527090] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3439da56-03e2-4cc0-80e6-a810f3229c20 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.534514] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1167.534514] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f3564d-04cd-10f5-8a74-92742ea613c9" [ 1167.534514] env[61545]: _type = "Task" [ 1167.534514] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.546061] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f3564d-04cd-10f5-8a74-92742ea613c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.614479] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.614929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.615176] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.615379] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.615551] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.618071] env[61545]: INFO nova.compute.manager [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Terminating instance [ 1167.641164] env[61545]: DEBUG nova.scheduler.client.report [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.859438] env[61545]: DEBUG oslo_concurrency.lockutils [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad 
tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.876267] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.876652] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.876778] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.876918] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.877210] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.877508] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.877595] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.877863] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.878023] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.878199] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.878476] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.887625] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c9cf1ba-fb41-47fd-8d47-78ac22cc3992 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.913872] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1167.913872] env[61545]: value = "task-4256661" [ 1167.913872] env[61545]: _type = "Task" [ 1167.913872] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.922771] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.966288] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256659, 'name': ReconfigVM_Task, 'duration_secs': 0.454532} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.966572] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.966898] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.006397] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256660, 'name': ReconfigVM_Task, 'duration_secs': 0.359174} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.006397] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Reconfigured VM instance instance-00000066 to attach disk [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1168.007079] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45336ac3-2648-434e-b0e1-551f31d9f166 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.015063] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1168.015063] env[61545]: value = "task-4256662" [ 1168.015063] env[61545]: _type = "Task" [ 1168.015063] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.021143] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f09e3545-d503-41ec-a6ff-3df27f0883c7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.882s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.022232] env[61545]: DEBUG oslo_concurrency.lockutils [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.163s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.031361] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256662, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.046481] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f3564d-04cd-10f5-8a74-92742ea613c9, 'name': SearchDatastore_Task, 'duration_secs': 0.014923} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.047139] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2ff373b-d027-447c-8fd6-d98b0d0a3e68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.053314] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1168.053314] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbb9eb-8d3d-50d3-aade-706fe43256e0" [ 1168.053314] env[61545]: _type = "Task" [ 1168.053314] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.062493] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbb9eb-8d3d-50d3-aade-706fe43256e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.124788] env[61545]: DEBUG nova.compute.manager [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.124999] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.126451] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804aea11-b42c-4e64-900c-b379dea24a42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.134531] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.134816] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-802d2eac-0feb-469a-9469-c60521fbd4f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.148841] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.176509] env[61545]: INFO nova.scheduler.client.report [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Deleted allocations for instance f2975097-29a3-46cc-9dea-0c414baff246 [ 1168.213986] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.214819] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.215149] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] a5ec3957-4646-4de4-8eac-9f0fbbf8da52 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.215524] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfd0df6d-d6fb-4a5d-8316-fb51bb642d75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.225365] env[61545]: DEBUG oslo_vmware.api [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 
tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1168.225365] env[61545]: value = "task-4256664" [ 1168.225365] env[61545]: _type = "Task" [ 1168.225365] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.235758] env[61545]: DEBUG oslo_vmware.api [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.425401] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256661, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.474012] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e0e532-5a41-4104-934c-940df6bdd5e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.496848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79c998b-fdf5-451f-ac39-f87b8ccc085d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.518620] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.525600] env[61545]: INFO nova.compute.manager [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Detaching volume 2c71412f-9045-47cc-8b44-501e69c75aec [ 1168.534360] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256662, 'name': Rename_Task, 'duration_secs': 0.154138} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.537125] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1168.537930] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-844c794b-79fa-498e-b6ca-09ba1ff0495d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.545626] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1168.545626] env[61545]: value = "task-4256665" [ 1168.545626] env[61545]: _type = "Task" [ 1168.545626] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.555512] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.565496] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbb9eb-8d3d-50d3-aade-706fe43256e0, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.566951] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.567386] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1168.568520] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d71ad08a-5c6b-45cc-abe8-11e38201f4b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.572076] env[61545]: INFO nova.virt.block_device [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Attempting to driver detach volume 2c71412f-9045-47cc-8b44-501e69c75aec from mountpoint /dev/sdb [ 1168.572310] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1168.572499] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838826', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'name': 'volume-2c71412f-9045-47cc-8b44-501e69c75aec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c61ca5f4-78ae-4626-977d-8c17dc12c012', 'attached_at': '', 'detached_at': '', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'serial': '2c71412f-9045-47cc-8b44-501e69c75aec'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1168.573644] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964ac858-e00b-4df8-8e68-208a75fadd0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.601018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d280ab1-1537-4847-93e9-c7c52a75f0ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.603897] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1168.603897] env[61545]: value = "task-4256666" [ 1168.603897] env[61545]: _type = "Task" [ 1168.603897] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.610446] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b911ba-0567-4ecc-97e6-77b57d8dab74 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.615533] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.634158] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c1c2f7-b333-4860-bdf3-37e139067e87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.650149] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] The volume has not been displaced from its original location: [datastore1] volume-2c71412f-9045-47cc-8b44-501e69c75aec/volume-2c71412f-9045-47cc-8b44-501e69c75aec.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1168.655485] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1168.655866] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18b34b9e-e1f6-4087-a2a6-53711fa537d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.675348] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1168.675348] env[61545]: value = "task-4256667" [ 1168.675348] env[61545]: _type = "Task" [ 1168.675348] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.687353] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.689567] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3ed5c8b7-b791-4a08-a4e7-6ec0b12d3875 tempest-ServerGroupTestJSON-1228520834 tempest-ServerGroupTestJSON-1228520834-project-member] Lock "f2975097-29a3-46cc-9dea-0c414baff246" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.064s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.736947] env[61545]: DEBUG oslo_vmware.api [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170872} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.737234] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.737423] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.737706] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.737952] env[61545]: INFO nova.compute.manager [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1168.738271] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.738500] env[61545]: DEBUG nova.compute.manager [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.738601] env[61545]: DEBUG nova.network.neutron [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.926476] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256661, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.047287] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "6ea419eb-a171-4e79-868f-25851fde8a8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.047600] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.061799] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256665, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.116871] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.179469] env[61545]: DEBUG nova.compute.manager [req-59337568-96c3-49e0-9035-e8d444569ab8 req-5998815c-94c9-484f-902f-b43914b1a1c6 service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Received event network-vif-deleted-7a524b94-5ca7-497a-8066-258b28fe2992 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1169.179469] env[61545]: INFO nova.compute.manager [req-59337568-96c3-49e0-9035-e8d444569ab8 req-5998815c-94c9-484f-902f-b43914b1a1c6 service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Neutron deleted interface 7a524b94-5ca7-497a-8066-258b28fe2992; detaching it from the instance and deleting it from the info cache [ 1169.179469] env[61545]: DEBUG nova.network.neutron [req-59337568-96c3-49e0-9035-e8d444569ab8 req-5998815c-94c9-484f-902f-b43914b1a1c6 service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.193280] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256667, 'name': ReconfigVM_Task, 'duration_secs': 0.257333} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.194238] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1169.199342] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d355cca-c722-4dea-9dcd-f8c2f1d2742d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.216881] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1169.216881] env[61545]: value = "task-4256668" [ 1169.216881] env[61545]: _type = "Task" [ 1169.216881] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.228032] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256668, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.426859] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256661, 'name': ReconfigVM_Task, 'duration_secs': 1.179405} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.427241] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1169.557632] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1169.560334] env[61545]: DEBUG oslo_vmware.api [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256665, 'name': PowerOnVM_Task, 'duration_secs': 0.521247} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.560749] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.560942] env[61545]: INFO nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1169.561128] env[61545]: DEBUG nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1169.562107] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c78e9f-c45c-4e64-ab97-5d463d33aa72 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.618037] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.931136} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.618501] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1169.618677] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1169.618937] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51da1694-3047-4a7b-acda-d23d2df1049b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.627205] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1169.627205] env[61545]: value = "task-4256669" [ 1169.627205] env[61545]: _type = "Task" [ 1169.627205] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.636695] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.656745] env[61545]: DEBUG nova.network.neutron [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.681895] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ee6300d-8fd6-421a-852a-efd5cef43742 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.692734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7545077c-0b0d-4428-bc26-494213f3fd3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.729675] env[61545]: DEBUG nova.compute.manager [req-59337568-96c3-49e0-9035-e8d444569ab8 req-5998815c-94c9-484f-902f-b43914b1a1c6 service nova] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Detach interface failed, port_id=7a524b94-5ca7-497a-8066-258b28fe2992, reason: Instance a5ec3957-4646-4de4-8eac-9f0fbbf8da52 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1169.739041] env[61545]: DEBUG oslo_vmware.api [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256668, 'name': ReconfigVM_Task, 'duration_secs': 0.189222} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.739595] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838826', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'name': 'volume-2c71412f-9045-47cc-8b44-501e69c75aec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c61ca5f4-78ae-4626-977d-8c17dc12c012', 'attached_at': '', 'detached_at': '', 'volume_id': '2c71412f-9045-47cc-8b44-501e69c75aec', 'serial': '2c71412f-9045-47cc-8b44-501e69c75aec'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1169.935626] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1169.935955] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1169.936050] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1169.936950] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1169.936950] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1169.936950] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1169.937056] env[61545]: DEBUG nova.virt.hardware [None 
req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1169.937214] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1169.937451] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1169.937682] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1169.937928] env[61545]: DEBUG nova.virt.hardware [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1169.947234] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.947649] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb52ad4a-33bf-459f-8a8e-a2e197953968 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.972802] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1169.972802] env[61545]: value = "task-4256670" [ 1169.972802] env[61545]: _type = "Task" [ 1169.972802] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.981788] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256670, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.086237] env[61545]: INFO nova.compute.manager [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Took 21.58 seconds to build instance. 
[ 1170.086237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.086237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.088876] env[61545]: INFO nova.compute.claims [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.137301] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.160381] env[61545]: INFO nova.compute.manager [-] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Took 1.42 seconds to deallocate network for instance. [ 1170.235302] env[61545]: DEBUG nova.network.neutron [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Port b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1170.294769] env[61545]: DEBUG nova.objects.instance [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid c61ca5f4-78ae-4626-977d-8c17dc12c012 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.494231] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256670, 'name': ReconfigVM_Task, 'duration_secs': 0.173985} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.494732] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1170.495959] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c154a5a-0f1e-427e-95e9-5b6682d7602e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.527267] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.527267] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc5c0fc7-0e18-4501-bef3-f7a9190e4e24 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.547483] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1170.547483] env[61545]: value = "task-4256671" [ 1170.547483] env[61545]: _type = "Task" [ 1170.547483] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.557478] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256671, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.593569] env[61545]: DEBUG oslo_concurrency.lockutils [None req-c0178472-2eb8-4855-bc60-6d14d78f909b tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.097s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.616651] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.618048] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.640198] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.994222} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.640528] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1170.641449] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa01021-f697-4453-bbf6-190e426d9f1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.665540] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.666874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.667145] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75666da0-159a-451f-8a39-a62dbdae631b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.688152] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1170.688152] env[61545]: value = "task-4256672" [ 1170.688152] env[61545]: _type = "Task" [ 1170.688152] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.696707] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256672, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.057866] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256671, 'name': ReconfigVM_Task, 'duration_secs': 0.297341} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.059843] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.059843] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.120690] env[61545]: DEBUG nova.compute.utils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1171.199499] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256672, 'name': ReconfigVM_Task, 'duration_secs': 0.288362} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.202438] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5/00c4a77a-e049-4511-95c9-e4b6596490c5.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.203570] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cf7b09c-4756-43be-81aa-46bc84215dc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.211180] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1171.211180] env[61545]: value = "task-4256673" [ 1171.211180] env[61545]: _type = "Task" [ 1171.211180] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.222858] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256673, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.224866] env[61545]: DEBUG nova.compute.manager [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1171.224866] env[61545]: DEBUG nova.compute.manager [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing instance network info cache due to event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1171.224866] env[61545]: DEBUG oslo_concurrency.lockutils [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.224866] env[61545]: DEBUG oslo_concurrency.lockutils [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.225511] env[61545]: DEBUG nova.network.neutron [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.259764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.260045] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.260526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.304122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592e9992-9add-407a-962c-5f07d11bb2ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1171.308196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-314f0d3e-4f03-4a89-8bcc-771d5e0787ad tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.285s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.313808] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f362c72b-afa9-427e-988a-bb381325af96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.346976] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c283ab-2110-45e3-ad17-4ab731156ee8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.359997] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8c647a-66e6-4dad-bf9f-765a257dfaaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.374447] env[61545]: DEBUG nova.compute.provider_tree [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.567877] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbf0633-2753-4646-abe0-8873a7e50de4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.587261] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8dcb1f-4ef2-4932-a215-39128fb24cc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.608460] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.626531] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.721956] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256673, 'name': Rename_Task, 'duration_secs': 0.15528} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.722263] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1171.722517] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-381073e0-c01b-4f5b-b743-808cdbd34029 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.729771] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1171.729771] env[61545]: value = "task-4256674" [ 1171.729771] env[61545]: _type = "Task" [ 1171.729771] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.738974] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.877821] env[61545]: DEBUG nova.scheduler.client.report [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.046990] env[61545]: DEBUG nova.network.neutron [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updated VIF entry in instance network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1172.046990] env[61545]: DEBUG nova.network.neutron [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.165630] env[61545]: DEBUG nova.network.neutron [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Port 55458c7f-c486-49fb-966b-0478ed8948ee binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1172.240525] env[61545]: DEBUG oslo_vmware.api [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256674, 'name': PowerOnVM_Task, 'duration_secs': 0.477767} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.241477] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1172.242190] env[61545]: DEBUG nova.compute.manager [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1172.243469] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c009838-fa88-4afa-aad6-a5b8d3031068 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.284046] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.284817] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.284817] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.285028] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.285319] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.288262] env[61545]: INFO nova.compute.manager [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 
tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Terminating instance [ 1172.311985] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.312230] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.312391] env[61545]: DEBUG nova.network.neutron [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.383245] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.383791] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1172.387011] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.720s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.387011] env[61545]: DEBUG nova.objects.instance [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'resources' on Instance uuid a5ec3957-4646-4de4-8eac-9f0fbbf8da52 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.549506] env[61545]: DEBUG oslo_concurrency.lockutils [req-6fb5de82-6f11-4807-a33a-314348bde6ee req-be8539fd-0aa8-40e3-b87b-d4f686e430f7 service nova] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.700596] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.700596] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.700596] env[61545]: INFO nova.compute.manager [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Attaching volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 to /dev/sdb [ 1172.731365] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8988446-03c4-4999-9474-c379f35dfb2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.739539] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5084ce-5e03-4d37-aad9-33b14b1af43d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.753778] env[61545]: DEBUG nova.virt.block_device [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating existing volume attachment record: 36285bf1-80d0-4870-a130-1c03b5d0ed1a {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1172.763403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 
tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.795133] env[61545]: DEBUG nova.compute.manager [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1172.795647] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.796901] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be791616-e4a8-4478-942e-21741aa177f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.806906] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.807244] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4e3d1e1-dde5-4e9c-a716-eb568412d805 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.816722] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1172.816722] env[61545]: value = "task-4256675" [ 1172.816722] env[61545]: _type = "Task" [ 1172.816722] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.826616] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256675, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.894195] env[61545]: DEBUG nova.compute.utils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1172.896031] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1172.896202] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.974469] env[61545]: DEBUG nova.policy [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a480cf5708dd4134b64d4700782b5e5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c29495610a5f46a39670abf9a34ca73a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.108546] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "00c4a77a-e049-4511-95c9-e4b6596490c5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.108878] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.109193] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "00c4a77a-e049-4511-95c9-e4b6596490c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.109428] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.109640] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.111941] env[61545]: INFO nova.compute.manager [None 
req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Terminating instance [ 1173.127039] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4082633c-2dc6-42fb-8dc9-bad3699ac8c3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.136280] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08323da4-f488-41b3-b3c5-f3a0f3ad7a06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.171891] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aba9f85-3ec4-409f-9820-07963c1985b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.192474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.192746] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.192920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.198849] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d7d27f-64f0-4ce2-98b7-2ce9bc15c147 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.215097] env[61545]: DEBUG nova.compute.provider_tree [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.327972] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256675, 'name': PowerOffVM_Task, 'duration_secs': 0.322748} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.328289] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.328487] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1173.328772] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60e80b81-91aa-4a5b-933d-b1501df50478 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.390395] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1173.390687] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1173.390916] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleting the datastore file [datastore2] c61ca5f4-78ae-4626-977d-8c17dc12c012 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.391253] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2e62a1f-6ee3-4ffc-958b-ded1cdb8746a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.399843] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1173.402886] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1173.402886] env[61545]: value = "task-4256680" [ 1173.402886] env[61545]: _type = "Task" [ 1173.402886] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.412783] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256680, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.571688] env[61545]: DEBUG nova.network.neutron [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.616438] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "refresh_cache-00c4a77a-e049-4511-95c9-e4b6596490c5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.616627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquired lock "refresh_cache-00c4a77a-e049-4511-95c9-e4b6596490c5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.616812] env[61545]: DEBUG nova.network.neutron [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1173.719097] env[61545]: DEBUG nova.scheduler.client.report [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.833594] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Successfully created port: 4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.922438] env[61545]: DEBUG oslo_vmware.api [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157767} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.922582] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1173.922707] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1173.922886] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1173.923077] env[61545]: INFO nova.compute.manager [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1173.923547] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.923547] env[61545]: DEBUG nova.compute.manager [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1173.923660] env[61545]: DEBUG nova.network.neutron [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1174.076020] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.154498] env[61545]: DEBUG nova.network.neutron [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1174.226988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.230883] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.466s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.230883] env[61545]: DEBUG nova.objects.instance [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1174.253778] env[61545]: INFO nova.scheduler.client.report [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance a5ec3957-4646-4de4-8eac-9f0fbbf8da52 [ 1174.281938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.281938] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.281938] env[61545]: DEBUG nova.network.neutron [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1174.303016] env[61545]: DEBUG nova.network.neutron [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.417781] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1174.423112] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Successfully created port: 84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1174.446009] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1174.446278] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.446431] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1174.446609] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.446772] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1174.446912] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1174.447168] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1174.447538] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1174.447538] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1174.447668] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1174.447843] env[61545]: DEBUG nova.virt.hardware [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1174.448734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01471e1-8859-4b6b-a7c0-ef03d3e56936 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.458561] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110dfc4f-8490-4681-a7e5-64e66aff7f0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.585257] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee176d6-c2bd-4690-8f69-1cbe139da66b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.593745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c70f66-706e-4e4e-88fa-fd7104c39700 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.761147] env[61545]: 
DEBUG oslo_concurrency.lockutils [None req-89f21fb6-feaf-4c53-b018-51373701c2f5 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "a5ec3957-4646-4de4-8eac-9f0fbbf8da52" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.146s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.805919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Releasing lock "refresh_cache-00c4a77a-e049-4511-95c9-e4b6596490c5" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.806465] env[61545]: DEBUG nova.compute.manager [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1174.806731] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1174.807721] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a128ff2f-840e-4599-a259-63a2f3d9c5ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.818107] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.818388] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d26d2c0-f119-4c38-9d62-2b0336339ef3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.827312] env[61545]: DEBUG oslo_vmware.api [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1174.827312] env[61545]: value = "task-4256681" [ 1174.827312] env[61545]: _type = "Task" [ 1174.827312] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.839967] env[61545]: DEBUG oslo_vmware.api [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256681, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.199107] env[61545]: DEBUG nova.compute.manager [req-bb5c3da9-41a9-480f-8457-c733a3699420 req-66518d26-6aa4-40a0-ac65-1cf8844372a7 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Received event network-vif-deleted-f18fe5c7-64c8-4f58-b7c8-806d3e03985e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1175.199312] env[61545]: INFO nova.compute.manager [req-bb5c3da9-41a9-480f-8457-c733a3699420 req-66518d26-6aa4-40a0-ac65-1cf8844372a7 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Neutron deleted interface f18fe5c7-64c8-4f58-b7c8-806d3e03985e; detaching it from the instance and deleting it from the info cache [ 1175.199498] env[61545]: DEBUG nova.network.neutron [req-bb5c3da9-41a9-480f-8457-c733a3699420 req-66518d26-6aa4-40a0-ac65-1cf8844372a7 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.239117] env[61545]: DEBUG oslo_concurrency.lockutils [None req-99be33f2-87b3-47df-b72b-3d651e100894 tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.247740] env[61545]: DEBUG nova.network.neutron [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.277790] env[61545]: DEBUG nova.network.neutron [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.338742] env[61545]: DEBUG oslo_vmware.api [None 
req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256681, 'name': PowerOffVM_Task, 'duration_secs': 0.199875} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.339099] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.339281] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1175.339657] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe0f043-38cd-468b-9e49-59e49523ba13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.367432] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1175.367652] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1175.367880] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Deleting the datastore file [datastore2] 00c4a77a-e049-4511-95c9-e4b6596490c5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.368215] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd5673a6-2542-4ca8-891c-885e7b24b478 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.375955] env[61545]: DEBUG oslo_vmware.api [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for the task: (returnval){ [ 1175.375955] env[61545]: value = "task-4256684" [ 1175.375955] env[61545]: _type = "Task" [ 1175.375955] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.387366] env[61545]: DEBUG oslo_vmware.api [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.702047] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d6defae-3612-4082-9443-aed4ba0455dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.713059] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140b537d-f160-46b6-9576-24974824acaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.744948] env[61545]: DEBUG nova.compute.manager [req-bb5c3da9-41a9-480f-8457-c733a3699420 req-66518d26-6aa4-40a0-ac65-1cf8844372a7 service nova] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Detach interface failed, port_id=f18fe5c7-64c8-4f58-b7c8-806d3e03985e, reason: Instance c61ca5f4-78ae-4626-977d-8c17dc12c012 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1175.750777] env[61545]: INFO nova.compute.manager [-] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Took 1.83 seconds to deallocate network for instance. [ 1175.781328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.793992] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2bb29e-933c-4df6-a668-99b5728db35b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.818671] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206dbbaf-de8c-4916-b546-5fd1aeff381b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.827075] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.889621] env[61545]: DEBUG oslo_vmware.api [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Task: {'id': task-4256684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122926} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.889621] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1175.889621] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1175.889621] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1175.889621] env[61545]: INFO nova.compute.manager [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1175.889621] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1175.889621] env[61545]: DEBUG nova.compute.manager [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1175.889621] env[61545]: DEBUG nova.network.neutron [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1175.905576] env[61545]: DEBUG nova.network.neutron [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1175.940434] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.940621] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.257095] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.257414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.257639] env[61545]: DEBUG nova.objects.instance [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'resources' on Instance uuid c61ca5f4-78ae-4626-977d-8c17dc12c012 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1176.310285] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b971e1-f283-4f8b-93a9-da0f81f7b771 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.336186] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1176.336638] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad18574d-efaf-4661-88d3-6e6b8f8092d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.339329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbec858-58e4-4969-be64-0b21a4ccdf1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.347978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.356713] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1176.356713] env[61545]: value = "task-4256685" [ 1176.356713] env[61545]: _type = "Task" [ 1176.356713] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.365545] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256685, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.409721] env[61545]: DEBUG nova.network.neutron [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.433787] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Successfully updated port: 4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1176.444364] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1176.861258] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1176.861878] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55e07160-777b-4dcf-83eb-cd0c5c301f1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.872619] env[61545]: DEBUG oslo_vmware.api [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256685, 'name': PowerOnVM_Task, 'duration_secs': 0.481936} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.876685] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1176.876895] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd2dd86-027f-4c8e-8e5a-9f818f7d6278 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance '2129a1eb-4ad7-42ef-9554-6202f7a44f58' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.880466] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1176.880466] env[61545]: value = "task-4256686" [ 1176.880466] env[61545]: _type = "Task" [ 1176.880466] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.889111] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256686, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.912876] env[61545]: INFO nova.compute.manager [-] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Took 1.02 seconds to deallocate network for instance. 
[ 1176.964208] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.968994] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3493499-3609-498d-a1ac-ae13b0252f90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.980218] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973532e3-e797-4079-9f52-11214ee7e066 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.012408] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b06cc25-1afd-494a-af3a-a2b9aa9705f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.020717] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33eb3d16-544a-45fa-94ce-519f314cffb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.035605] env[61545]: DEBUG nova.compute.provider_tree [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.227668] env[61545]: DEBUG nova.compute.manager [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-vif-plugged-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1177.227996] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Acquiring lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.228645] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.228895] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.229147] env[61545]: DEBUG nova.compute.manager 
[req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] No waiting events found dispatching network-vif-plugged-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1177.229390] env[61545]: WARNING nova.compute.manager [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received unexpected event network-vif-plugged-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 for instance with vm_state building and task_state spawning. [ 1177.229617] env[61545]: DEBUG nova.compute.manager [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-changed-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1177.229883] env[61545]: DEBUG nova.compute.manager [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Refreshing instance network info cache due to event network-changed-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1177.230125] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Acquiring lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.230328] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Acquired lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.230530] env[61545]: DEBUG nova.network.neutron [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Refreshing network info cache for port 4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.395275] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256686, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.421947] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.539068] env[61545]: DEBUG nova.scheduler.client.report [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.767589] env[61545]: DEBUG nova.network.neutron [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.800771] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1177.801078] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1177.802041] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c9ab83-85e3-43a3-be1e-1753a3be6c05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.824904] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30dda71-e788-41db-b2a7-2281e25d375b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.852535] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.852957] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbfc7da8-4728-4833-b0a1-8e719e845ea4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.873255] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1177.873255] env[61545]: value = "task-4256687" [ 1177.873255] env[61545]: _type = "Task" [ 1177.873255] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.877103] env[61545]: DEBUG nova.network.neutron [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.883629] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256687, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.895745] env[61545]: DEBUG oslo_vmware.api [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256686, 'name': PowerOnVM_Task, 'duration_secs': 0.81441} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.896044] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1177.896273] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a41332f8-a31e-4577-ac0f-5f1d6385ebe2 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance '12aed0d0-b5dd-4f1b-913a-000c06a8eab4' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.044218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.048656] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.084s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.049840] env[61545]: INFO nova.compute.claims [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.070321] env[61545]: INFO nova.scheduler.client.report [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted allocations for instance c61ca5f4-78ae-4626-977d-8c17dc12c012 [ 1178.380506] env[61545]: DEBUG oslo_concurrency.lockutils [req-5dc526c7-31d5-490a-9ebb-f759b5c887b1 req-1e8dacd1-54c9-4479-a74e-c411efdf2ed0 service nova] Releasing lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.386074] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256687, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.581114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4673c2a5-7b9a-4f1a-b4ac-176067f38cf7 tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "c61ca5f4-78ae-4626-977d-8c17dc12c012" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.297s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.608033] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Successfully updated port: 84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.886028] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256687, 'name': ReconfigVM_Task, 'duration_secs': 0.850407} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.886028] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.891022] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2ccb24c-d3a0-4e49-8ba7-6ed6df1fae28 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.911068] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1178.911068] env[61545]: value = "task-4256688" [ 1178.911068] env[61545]: _type = "Task" [ 1178.911068] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.920876] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256688, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.096342] env[61545]: DEBUG nova.network.neutron [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Port b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1179.096618] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.096772] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.096938] env[61545]: DEBUG nova.network.neutron [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.114743] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.114966] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.115125] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.242006] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2250cb2b-8ff5-4360-9f36-c20cf2f6b06d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.250547] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c7b272-fb0b-4472-b987-abe441b88573 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.283620] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd66557-409c-48f9-af23-c433a4ff1086 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.287329] env[61545]: DEBUG nova.compute.manager [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-vif-plugged-84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1179.287544] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Acquiring lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.287789] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.287976] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.288174] env[61545]: DEBUG nova.compute.manager [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] No waiting events found dispatching network-vif-plugged-84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1179.288345] env[61545]: WARNING nova.compute.manager [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received unexpected event network-vif-plugged-84ae420f-63eb-44ec-82d6-f35d63f23506 for instance with vm_state building and task_state spawning. [ 1179.288506] env[61545]: DEBUG nova.compute.manager [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-changed-84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1179.288825] env[61545]: DEBUG nova.compute.manager [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Refreshing instance network info cache due to event network-changed-84ae420f-63eb-44ec-82d6-f35d63f23506. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1179.288825] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Acquiring lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.296312] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f52380-8cde-4598-a4f6-cfb265b3e979 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.313362] env[61545]: DEBUG nova.compute.provider_tree [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.429220] env[61545]: DEBUG oslo_vmware.api [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256688, 'name': ReconfigVM_Task, 'duration_secs': 0.155106} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.429625] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1179.674661] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1179.816809] env[61545]: DEBUG nova.scheduler.client.report [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.326026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.275s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.326026] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1180.327526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.906s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.328180] env[61545]: DEBUG nova.objects.instance [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lazy-loading 'resources' on Instance uuid 00c4a77a-e049-4511-95c9-e4b6596490c5 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.391027] env[61545]: DEBUG nova.network.neutron [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.479080] env[61545]: DEBUG nova.objects.instance [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'flavor' on Instance uuid c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.493637] env[61545]: DEBUG nova.network.neutron [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Updating instance_info_cache with network_info: [{"id": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "address": "fa:16:3e:90:8a:3c", "network": {"id": "d4e3a095-62b8-47be-87bc-691c07d3ccf4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-682207266", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4e5fb5-9f", "ovs_interfaceid": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "84ae420f-63eb-44ec-82d6-f35d63f23506", "address": "fa:16:3e:d6:2e:dd", "network": {"id": "2b26bdb4-bedd-49b7-a482-3f58b14f98c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-514736112", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ae420f-63", "ovs_interfaceid": "84ae420f-63eb-44ec-82d6-f35d63f23506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.833235] env[61545]: DEBUG nova.compute.utils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1180.837879] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1180.838059] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1180.892583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.901756] env[61545]: DEBUG nova.policy [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25fd6c8662bd4b7f9da546ec78acda02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68a860104885480d9da472bc969ba6d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1180.985799] env[61545]: DEBUG oslo_concurrency.lockutils [None req-226634df-c6bb-49a6-8261-597780eaedc4 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.287s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.996899] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.997289] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance network_info: |[{"id": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "address": "fa:16:3e:90:8a:3c", "network": {"id": "d4e3a095-62b8-47be-87bc-691c07d3ccf4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-682207266", "subnets": 
[{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4e5fb5-9f", "ovs_interfaceid": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "84ae420f-63eb-44ec-82d6-f35d63f23506", "address": "fa:16:3e:d6:2e:dd", "network": {"id": "2b26bdb4-bedd-49b7-a482-3f58b14f98c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-514736112", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ae420f-63", "ovs_interfaceid": "84ae420f-63eb-44ec-82d6-f35d63f23506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1180.998254] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Acquired lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.998442] env[61545]: DEBUG nova.network.neutron [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Refreshing network info cache for port 84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.999674] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:8a:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '12d8eedb-97cb-4d3b-b364-42d7fd8b3c85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:2e:dd', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84ae420f-63eb-44ec-82d6-f35d63f23506', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1181.009184] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1181.014404] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1181.015620] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2c66b1-99c4-4803-9dbb-8efa5fe32e0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.021949] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6a4b0cf-e26c-47ae-853b-1c82d9bce16a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.047958] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994ca8d6-801c-4389-b1bb-d11b45dd570b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.053194] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1181.053194] env[61545]: value = "task-4256689" [ 1181.053194] env[61545]: _type = "Task" [ 1181.053194] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.083747] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1a7977-3d0c-433e-ae77-cdb555226d94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.090020] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256689, 'name': CreateVM_Task} progress is 15%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.095316] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aa9c31-7e9f-4207-b27d-d9f708e5244a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.110547] env[61545]: DEBUG nova.compute.provider_tree [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.245084] env[61545]: DEBUG nova.network.neutron [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Port 55458c7f-c486-49fb-966b-0478ed8948ee binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1181.245084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.245084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.245308] env[61545]: DEBUG nova.network.neutron [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.338727] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1181.345813] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.346060] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.398879] env[61545]: DEBUG nova.compute.manager [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61545) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1181.522672] env[61545]: INFO nova.compute.manager [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Rebuilding instance [ 1181.568247] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256689, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.574891] env[61545]: DEBUG nova.compute.manager [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.576038] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f789140-3ae1-4521-a567-b8d0f046eacb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.613791] env[61545]: DEBUG nova.scheduler.client.report [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.849802] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1181.880195] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Successfully created port: f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.890724] env[61545]: DEBUG nova.network.neutron [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Updated VIF entry in instance network info cache for port 84ae420f-63eb-44ec-82d6-f35d63f23506. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1181.891184] env[61545]: DEBUG nova.network.neutron [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Updating instance_info_cache with network_info: [{"id": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "address": "fa:16:3e:90:8a:3c", "network": {"id": "d4e3a095-62b8-47be-87bc-691c07d3ccf4", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-682207266", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "12d8eedb-97cb-4d3b-b364-42d7fd8b3c85", "external-id": "nsx-vlan-transportzone-870", "segmentation_id": 870, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4e5fb5-9f", "ovs_interfaceid": "4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "84ae420f-63eb-44ec-82d6-f35d63f23506", "address": "fa:16:3e:d6:2e:dd", "network": {"id": "2b26bdb4-bedd-49b7-a482-3f58b14f98c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-514736112", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ae420f-63", "ovs_interfaceid": "84ae420f-63eb-44ec-82d6-f35d63f23506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.064850] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256689, 'name': CreateVM_Task, 'duration_secs': 0.52806} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.065043] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1182.065858] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.066079] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.066401] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1182.066661] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78bf1d33-681f-4ad8-ac7e-421dd662f0a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.071594] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1182.071594] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52347bc8-fd62-d80b-5bef-61bfaf256296" [ 1182.071594] env[61545]: _type = "Task" [ 1182.071594] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.082745] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52347bc8-fd62-d80b-5bef-61bfaf256296, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.118541] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.141761] env[61545]: INFO nova.scheduler.client.report [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Deleted allocations for instance 00c4a77a-e049-4511-95c9-e4b6596490c5 [ 1182.250409] env[61545]: DEBUG nova.network.neutron [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.354454] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1182.372204] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.372540] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.374173] env[61545]: INFO nova.compute.claims [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.385035] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1182.385268] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1182.385425] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1182.385603] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1182.385749] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1182.385897] 
env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1182.386119] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1182.386293] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1182.386489] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1182.386653] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1182.386826] env[61545]: DEBUG nova.virt.hardware [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1182.387701] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32029aa-dcb5-45f2-94de-5db7b65236b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.394074] env[61545]: DEBUG oslo_concurrency.lockutils [req-ad74ba4f-da1a-4f28-b6c6-3f69c8b18ba0 req-8d510329-df16-4da5-b280-3f40b529c00a service nova] Releasing lock "refresh_cache-6ea419eb-a171-4e79-868f-25851fde8a8b" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.397756] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56588df8-a81f-4c2e-8ff6-dccb2ec4c97a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.535394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.583593] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 
tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52347bc8-fd62-d80b-5bef-61bfaf256296, 'name': SearchDatastore_Task, 'duration_secs': 0.034415} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.584023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.584357] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.584712] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.584970] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.585276] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.585591] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30a302f6-5dce-4ce2-a044-ac45e29ae530 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.591616] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.591963] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abfe91a4-d34b-4c47-bd66-4c40e0afa78e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.598604] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.598853] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1182.600933] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d05a03d7-d3ed-46e2-93ab-3d43306d3276 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.603906] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1182.603906] env[61545]: value = "task-4256690" [ 1182.603906] env[61545]: _type = "Task" [ 1182.603906] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.609665] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1182.609665] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d59cd-b754-57f8-4056-85a790033dca" [ 1182.609665] env[61545]: _type = "Task" [ 1182.609665] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.617262] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256690, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.623429] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d59cd-b754-57f8-4056-85a790033dca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.649403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc27e411-859d-4f48-960b-d79f3d555cec tempest-ServersListShow296Test-158138544 tempest-ServersListShow296Test-158138544-project-member] Lock "00c4a77a-e049-4511-95c9-e4b6596490c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.540s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.753556] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.117945] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256690, 'name': PowerOffVM_Task, 'duration_secs': 0.25579} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.117945] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1183.125484] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d59cd-b754-57f8-4056-85a790033dca, 'name': SearchDatastore_Task, 'duration_secs': 0.018877} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.128621] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3868d053-55e1-4615-8acc-6f3c5b96a934 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.134362] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1183.134362] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f5aac8-59b6-ac96-d80f-b94d063bf7af" [ 1183.134362] env[61545]: _type = "Task" [ 1183.134362] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.144419] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f5aac8-59b6-ac96-d80f-b94d063bf7af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.183281] env[61545]: INFO nova.compute.manager [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Detaching volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 [ 1183.219062] env[61545]: INFO nova.virt.block_device [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Attempting to driver detach volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 from mountpoint /dev/sdb [ 1183.221042] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1183.221042] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1183.221042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffee0911-1448-4387-9497-444b81f7aa4e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.244773] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a92327-2f74-4829-8f27-24505ae7e14c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.254718] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d2708f-9c70-4caa-a756-0bbb926a0bc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.258075] env[61545]: DEBUG nova.compute.manager [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61545) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1183.258304] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.277544] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed3a383-3bb2-4998-b5da-c61b96014f2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.293807] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] The volume has not been displaced from its original location: [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1183.299158] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1183.299512] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b2a817f-657f-4661-a4a1-4442aaec658e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.318412] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1183.318412] env[61545]: value = "task-4256691" [ 1183.318412] env[61545]: _type = "Task" [ 1183.318412] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.327394] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256691, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.581815] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7d71de-f722-4d25-83cd-29fa2662bb71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.586722] env[61545]: DEBUG nova.compute.manager [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Received event network-vif-plugged-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1183.586912] env[61545]: DEBUG oslo_concurrency.lockutils [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.587137] env[61545]: DEBUG oslo_concurrency.lockutils [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.587304] env[61545]: DEBUG oslo_concurrency.lockutils [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.587474] env[61545]: DEBUG nova.compute.manager [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] No waiting events found dispatching network-vif-plugged-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.588104] env[61545]: WARNING nova.compute.manager [req-280c9854-eab4-4e16-ad1a-8aa4c87873d2 req-2950b310-ac42-477d-b6d3-96155e9703bc service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Received unexpected event network-vif-plugged-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 for instance with vm_state building and task_state spawning. 
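The event handling just logged follows a register-then-pop pattern: the thread spawning an instance registers the network-vif-plugged event it expects, and the handler for incoming Neutron notifications pops and signals the matching waiter, logging a warning when no waiter was registered (as in the entry above). A minimal sketch of that pattern, using simplified names rather than Nova's actual classes, might look like this:

    # Simplified illustration (not Nova's implementation) of the
    # pop_instance_event pattern recorded in the entries above: waiters are
    # keyed by (instance_uuid, event_name); an arriving external event pops
    # and signals the matching waiter, or is reported as unexpected.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, event_name):
            # Called before the operation that triggers the event (e.g. port plug).
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Remove and return the registered waiter, or None if nobody is waiting.
            with self._lock:
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        # Entry point for an incoming notification about the instance.
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print(f"Received unexpected event {event_name} for instance {instance_uuid}")
        else:
            waiter.set()  # wake the thread that registered interest in this event

In the run above the notification arrived while the instance was still building and no waiter had been registered yet, which is why the manager logs it as unexpected and simply drops it.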
[ 1183.593937] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f0c01b-ae9d-4f4a-aeff-ff66a633bb10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.627894] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9fc396-e93c-4f94-9c26-359244837414 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.641377] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b14fc21-c63f-45a7-a900-642fc7b289a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.660099] env[61545]: DEBUG nova.compute.provider_tree [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.661048] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f5aac8-59b6-ac96-d80f-b94d063bf7af, 'name': SearchDatastore_Task, 'duration_secs': 0.010027} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.661592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.661871] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6ea419eb-a171-4e79-868f-25851fde8a8b/6ea419eb-a171-4e79-868f-25851fde8a8b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1183.662161] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc8b66dc-c2ae-48fb-a6ee-ca251e979f21 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.670315] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1183.670315] env[61545]: value = "task-4256692" [ 1183.670315] env[61545]: _type = "Task" [ 1183.670315] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.682244] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.690710] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Successfully updated port: f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1183.829576] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256691, 'name': ReconfigVM_Task, 'duration_secs': 0.203789} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.829687] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1183.834503] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a235b8c8-3b27-4150-be0d-73df8a604c14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.851373] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1183.851373] env[61545]: value = "task-4256693" [ 1183.851373] env[61545]: _type = "Task" [ 1183.851373] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.860080] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256693, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.163378] env[61545]: DEBUG nova.scheduler.client.report [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.181338] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256692, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.196330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.196330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.196330] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.362129] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256693, 'name': ReconfigVM_Task, 'duration_secs': 0.483479} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.362518] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1184.672178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.672781] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1184.675679] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.141s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.687278] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569368} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.687700] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 6ea419eb-a171-4e79-868f-25851fde8a8b/6ea419eb-a171-4e79-868f-25851fde8a8b.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1184.687968] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1184.688279] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bd9a675-4cf7-4a41-980f-791f318b22a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.696721] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1184.696721] env[61545]: value = "task-4256694" [ 1184.696721] env[61545]: _type = "Task" [ 1184.696721] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.709440] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256694, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.732292] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.893941] env[61545]: DEBUG nova.network.neutron [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.183712] env[61545]: DEBUG nova.objects.instance [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'migration_context' on Instance uuid 2129a1eb-4ad7-42ef-9554-6202f7a44f58 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.183712] env[61545]: DEBUG nova.compute.utils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1185.185454] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1185.185454] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1185.207974] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185473} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.209079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1185.209453] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8fd6ec-1d81-4235-82ff-c689c655f73e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.236867] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 6ea419eb-a171-4e79-868f-25851fde8a8b/6ea419eb-a171-4e79-868f-25851fde8a8b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1185.237242] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-240dc001-47ff-4f27-9a68-cd51fe9bf593 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.254323] env[61545]: DEBUG nova.policy [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9bcc01a701c4b728d810b0b27ce6249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeb51ace7650413b987be7ddd7490182', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1185.263995] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1185.263995] env[61545]: value = "task-4256695" [ 1185.263995] env[61545]: _type = "Task" [ 1185.263995] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.274336] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256695, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.400032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.400032] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Instance network_info: |[{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.400032] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:7d:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db1f7867-8524-469c-ab47-d2c9e2751d98', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.406401] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.406911] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.407298] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1eda669-6cf0-4e88-8033-52d0cf52c93e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.422717] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.423326] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da8596a8-9d36-4642-922f-abf3cbdccec3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.434200] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.434200] env[61545]: value = "task-4256697" [ 1185.434200] env[61545]: _type = "Task" [ 1185.434200] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.434200] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1185.434200] env[61545]: value = "task-4256696" [ 1185.434200] env[61545]: _type = "Task" [ 1185.434200] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.444600] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256697, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.448276] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1185.448501] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1185.448789] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1185.449614] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576c7fb6-ee33-4218-96bb-e9f1f03ede80 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.469859] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309f2b84-451f-4d81-8f96-4f837d19ae36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.476914] env[61545]: WARNING nova.virt.vmwareapi.driver [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1185.477190] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.478036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baca2c71-1e70-4175-bdba-d35929c6403c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.486113] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.486457] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fa3da67-e568-4285-bb5f-441b07dd9b55 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.554476] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.554847] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.555187] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.555480] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b404ff77-9d56-431c-b609-b074a57576bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.563789] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1185.563789] env[61545]: value = "task-4256699" [ 1185.563789] env[61545]: _type = "Task" [ 1185.563789] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.575825] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256699, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.625027] env[61545]: DEBUG nova.compute.manager [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Received event network-changed-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1185.625134] env[61545]: DEBUG nova.compute.manager [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Refreshing instance network info cache due to event network-changed-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1185.625410] env[61545]: DEBUG oslo_concurrency.lockutils [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] Acquiring lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.626818] env[61545]: DEBUG oslo_concurrency.lockutils [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] Acquired lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.626818] env[61545]: DEBUG nova.network.neutron [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Refreshing network info cache for port f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.685962] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Successfully created port: ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.691015] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1185.775418] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256695, 'name': ReconfigVM_Task, 'duration_secs': 0.347513} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.778468] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 6ea419eb-a171-4e79-868f-25851fde8a8b/6ea419eb-a171-4e79-868f-25851fde8a8b.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.779419] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad765efa-36cb-49bf-9eca-68ec5b1f2c02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.787687] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1185.787687] env[61545]: value = "task-4256700" [ 1185.787687] env[61545]: _type = "Task" [ 1185.787687] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.797549] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256700, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.914521] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea56fa3b-d5bd-4568-9f85-b755d6b8a796 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.929035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb3fc5b-bd8f-4509-82a2-aa391862e405 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.962288] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77be6bfe-f3f9-431f-a348-cf59ec79a692 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.968618] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256697, 'name': CreateVM_Task, 'duration_secs': 0.357217} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.969311] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1185.970191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.970488] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.970891] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1185.973466] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afe822d1-e30c-4df7-b29b-614ea7869294 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.976886] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f9b330-294d-4c0b-a1aa-3fec4cc27962 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.984881] env[61545]: DEBUG 
oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1185.984881] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529ea1a6-8e2b-e2e8-c20d-c9ad1336f89d" [ 1185.984881] env[61545]: _type = "Task" [ 1185.984881] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.992937] env[61545]: DEBUG nova.compute.provider_tree [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.003307] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529ea1a6-8e2b-e2e8-c20d-c9ad1336f89d, 'name': SearchDatastore_Task, 'duration_secs': 0.010168} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.004239] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.004480] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.004716] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.004869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.005054] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.005704] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f040b46b-53fe-415a-8bd8-dbb19699a3d4 {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.018629] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.018816] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.019578] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a926d59-4548-41b1-9053-4763dde00e83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.025164] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1186.025164] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277913f-7d07-a496-7315-aab8389a7642" [ 1186.025164] env[61545]: _type = "Task" [ 1186.025164] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.033702] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277913f-7d07-a496-7315-aab8389a7642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.073351] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158982} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.073601] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.073801] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.073982] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.303118] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256700, 'name': Rename_Task, 'duration_secs': 0.14139} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.303838] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1186.304260] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aee106d-fe1b-459b-8aa6-bb64542c4ba3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.316459] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1186.316459] env[61545]: value = "task-4256701" [ 1186.316459] env[61545]: _type = "Task" [ 1186.316459] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.328145] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256701, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.502190] env[61545]: DEBUG nova.scheduler.client.report [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1186.537287] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5277913f-7d07-a496-7315-aab8389a7642, 'name': SearchDatastore_Task, 'duration_secs': 0.009419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.538476] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb934321-6f3f-4674-8945-643cedcd8cfe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.545564] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1186.545564] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5230aeff-01ad-47ce-a4d4-afb736f33f39" [ 1186.545564] env[61545]: _type = "Task" [ 1186.545564] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.555219] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5230aeff-01ad-47ce-a4d4-afb736f33f39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.579597] env[61545]: INFO nova.virt.block_device [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Booting with volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 at /dev/sdb [ 1186.587168] env[61545]: DEBUG nova.network.neutron [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updated VIF entry in instance network info cache for port f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.587642] env[61545]: DEBUG nova.network.neutron [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.629714] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1eff63b6-e7ab-41cf-88ab-bb2934742f7b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.640767] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6675e680-809c-4a00-a568-b1b4939c4e03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.682220] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89e0fc01-e9fe-4cd8-b961-5193add97b19 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.692220] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704991e0-5516-406f-8fc4-d244b0ca793d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.704737] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1186.733842] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58de585-5b4d-4670-9254-b626b2c79d14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.742167] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94aa97c-6061-45ea-9574-985e3255d93b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.747409] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.747683] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.747851] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.748097] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.748272] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.748485] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.748797] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.749081] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1186.749377] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.749656] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.750020] env[61545]: DEBUG nova.virt.hardware [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.751323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576a1d54-df33-494b-99ff-c1b98d413871 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.761454] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ec438e-1ccc-485d-98db-4ff7d206418e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.770048] env[61545]: DEBUG nova.virt.block_device [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating existing volume attachment record: 287d2807-863f-4f13-8a9b-bf930498ef1d {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1186.827998] env[61545]: DEBUG oslo_vmware.api [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256701, 'name': PowerOnVM_Task, 'duration_secs': 0.499666} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.828310] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.828521] env[61545]: INFO nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Took 12.41 seconds to spawn the instance on the hypervisor. [ 1186.828703] env[61545]: DEBUG nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1186.829596] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704968f5-df46-4c76-80cf-edc43fa1eac9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.056381] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5230aeff-01ad-47ce-a4d4-afb736f33f39, 'name': SearchDatastore_Task, 'duration_secs': 0.025152} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.056647] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.056921] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.057231] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2bfccad-b099-4497-a6f4-954924a02186 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.064120] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1187.064120] env[61545]: value = "task-4256702" [ 1187.064120] env[61545]: _type = "Task" [ 1187.064120] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.072488] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256702, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.090720] env[61545]: DEBUG oslo_concurrency.lockutils [req-4dc2ef44-6890-45d6-98da-143fdb5640e1 req-748ea310-bf9a-432d-932a-d0dc97898dab service nova] Releasing lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.216334] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Successfully updated port: ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1187.352170] env[61545]: INFO nova.compute.manager [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Took 17.29 seconds to build instance. [ 1187.518817] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.843s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.525579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 4.267s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.574961] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489431} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.575254] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1187.575477] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1187.575740] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7f5c3f1-928d-4668-9993-4dbbba71756b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.583386] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1187.583386] env[61545]: value = "task-4256703" [ 1187.583386] env[61545]: _type = "Task" [ 1187.583386] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.592499] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256703, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.671335] env[61545]: DEBUG nova.compute.manager [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Received event network-vif-plugged-ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1187.671557] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.671785] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.671955] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.672140] env[61545]: DEBUG nova.compute.manager [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] No waiting events found dispatching network-vif-plugged-ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1187.672316] env[61545]: WARNING nova.compute.manager [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Received unexpected event network-vif-plugged-ce048a59-c941-4a83-bbf9-29dfc46aae60 for instance with vm_state building and task_state spawning. [ 1187.672473] env[61545]: DEBUG nova.compute.manager [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Received event network-changed-ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1187.672624] env[61545]: DEBUG nova.compute.manager [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Refreshing instance network info cache due to event network-changed-ce048a59-c941-4a83-bbf9-29dfc46aae60. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1187.672810] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Acquiring lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.672996] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Acquired lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.673140] env[61545]: DEBUG nova.network.neutron [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Refreshing network info cache for port ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1187.720026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.855361] env[61545]: DEBUG oslo_concurrency.lockutils [None req-146ec56c-0964-4e25-8f79-23113fcf9f4e tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.808s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.034354] env[61545]: DEBUG nova.objects.instance [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'migration_context' on Instance uuid 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.092899] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073166} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.093199] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.094015] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1335707d-0909-4d40-a707-e875418f6850 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.116540] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.117188] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c35ff12a-74b2-46d4-b9e5-d221adf64b57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.139455] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1188.139455] env[61545]: value = "task-4256704" [ 1188.139455] env[61545]: _type = "Task" [ 1188.139455] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.148154] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256704, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.217912] env[61545]: DEBUG nova.network.neutron [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1188.304961] env[61545]: DEBUG nova.network.neutron [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.371952] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "6ea419eb-a171-4e79-868f-25851fde8a8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.373634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.373634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.373634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.373634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.374789] env[61545]: INFO nova.compute.manager [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Terminating instance [ 1188.650806] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256704, 'name': ReconfigVM_Task, 'duration_secs': 0.30343} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.653886] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1188.654755] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bde925f-8892-494f-8831-91cd2d23d97a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.662552] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1188.662552] env[61545]: value = "task-4256705" [ 1188.662552] env[61545]: _type = "Task" [ 1188.662552] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.674695] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256705, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.716826] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf1a252-49db-4819-9e7a-31b139118714 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.725294] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7781cc-68a7-4353-bae9-8f6ebba55cf5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.756148] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08c3b6d-b229-4ec0-86ee-77a15c7ee0e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.764535] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd88e373-cd8b-43bc-8b53-5e1c77358f50 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.780089] env[61545]: DEBUG nova.compute.provider_tree [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.808332] env[61545]: DEBUG oslo_concurrency.lockutils [req-6c956631-7d19-411d-ae53-9eb077d8b869 req-7174bf66-3f0e-4148-be32-1f9e0b5f786a service nova] Releasing lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.808849] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.809047] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.879906] env[61545]: DEBUG nova.compute.manager [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1188.880221] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.881883] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedddb05-914c-4e51-841a-acc3c9e001db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.890944] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.891246] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c7b1519-ebe2-406c-9a6e-285966167d1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.902360] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1188.902628] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.902788] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1188.902968] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.903128] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1188.903298] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1188.903788] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1188.903788] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1188.903896] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1188.904062] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1188.904241] env[61545]: DEBUG nova.virt.hardware [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1188.905502] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b060603-a6bf-40d0-9855-6b42486e24e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1188.909812] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1188.909812] env[61545]: value = "task-4256706" [ 1188.909812] env[61545]: _type = "Task" [ 1188.909812] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.921772] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9538a09-3829-48bc-beff-472c360c7707 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.925709] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.936815] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:26:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a89c03e7-6504-4eca-9dc3-110100bbf69c', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1188.944869] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.945216] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1188.945452] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5c3f9ae-cdd5-45fd-b1c8-d86951a130e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.966370] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1188.966370] env[61545]: value = "task-4256707" [ 1188.966370] env[61545]: _type = "Task" [ 1188.966370] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.975540] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256707, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.071044] env[61545]: INFO nova.compute.manager [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Swapping old allocation on dict_keys(['7015027d-c4e1-4938-ac31-6e4672774d7e']) held by migration f39d11e4-901d-40a5-afc4-7e49c7dc41aa for instance [ 1189.103302] env[61545]: DEBUG nova.scheduler.client.report [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Overwriting current allocation {'allocations': {'7015027d-c4e1-4938-ac31-6e4672774d7e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 146}}, 'project_id': '45efa52890714522b3058b7144b42a89', 'user_id': 'f7a70fb8ea2d498688688f7e51cf4bac', 'consumer_generation': 1} on consumer 2129a1eb-4ad7-42ef-9554-6202f7a44f58 {{(pid=61545) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1189.174200] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256705, 'name': Rename_Task, 'duration_secs': 0.159061} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.174598] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.174899] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97a282f2-2d7e-480a-b96b-e075ce038734 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.183663] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1189.183663] env[61545]: value = "task-4256708" [ 1189.183663] env[61545]: _type = "Task" [ 1189.183663] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.192319] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256708, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.225486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.225677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.225857] env[61545]: DEBUG nova.network.neutron [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1189.283508] env[61545]: DEBUG nova.scheduler.client.report [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.344326] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1189.348275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.348501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.422412] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256706, 'name': PowerOffVM_Task, 'duration_secs': 0.236364} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.422784] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.422994] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1189.423321] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-356d4534-9d36-4a00-b6aa-2b6450ae167f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.476379] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256707, 'name': CreateVM_Task, 'duration_secs': 0.38843} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.476601] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1189.477309] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.477474] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.477806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1189.478113] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0016f042-f88a-408e-b3c2-faff0292279d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.483715] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1189.483715] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52615e0d-be44-d58f-70c7-96b02bce9868" [ 1189.483715] env[61545]: _type = "Task" [ 1189.483715] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.493134] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52615e0d-be44-d58f-70c7-96b02bce9868, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.496736] env[61545]: DEBUG nova.network.neutron [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updating instance_info_cache with network_info: [{"id": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "address": "fa:16:3e:c6:94:82", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce048a59-c9", "ovs_interfaceid": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.527514] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1189.527750] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1189.527940] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleting the datastore file [datastore2] 6ea419eb-a171-4e79-868f-25851fde8a8b {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1189.528278] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61b1ec51-75dc-4d0b-b086-19a8f0700d5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.535550] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for the task: (returnval){ [ 1189.535550] env[61545]: value = "task-4256710" [ 1189.535550] env[61545]: _type = "Task" [ 1189.535550] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.545586] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.695446] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256708, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.851874] env[61545]: DEBUG nova.compute.utils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1189.994122] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52615e0d-be44-d58f-70c7-96b02bce9868, 'name': SearchDatastore_Task, 'duration_secs': 0.010786} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.994441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.994685] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1189.994925] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.995083] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.995270] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.995543] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a956f1bc-385d-4c5a-84f1-724753f7ef6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.999612] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.999902] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Instance network_info: |[{"id": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "address": "fa:16:3e:c6:94:82", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce048a59-c9", "ovs_interfaceid": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1190.000407] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:94:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10ff2092-e8eb-4768-ad4a-65a80560b447', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce048a59-c941-4a83-bbf9-29dfc46aae60', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1190.009056] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.010147] env[61545]: DEBUG nova.network.neutron [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [{"id": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "address": "fa:16:3e:ed:19:86", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb793a557-df", "ovs_interfaceid": "b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.012308] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1190.012593] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.012764] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1190.013705] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-176cd52d-e632-428c-b550-0107904b1416 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.028615] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d8c5946-1efd-4f6b-90ed-c570ebb2db12 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.035190] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1190.035190] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526101b7-7353-ea23-137c-224080ad3237" [ 1190.035190] env[61545]: _type = "Task" [ 1190.035190] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.039704] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1190.039704] env[61545]: value = "task-4256711" [ 1190.039704] env[61545]: _type = "Task" [ 1190.039704] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.049789] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526101b7-7353-ea23-137c-224080ad3237, 'name': SearchDatastore_Task, 'duration_secs': 0.010421} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.051033] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab37194-3836-42f0-aab5-ba255c6ea27f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.059272] env[61545]: DEBUG oslo_vmware.api [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Task: {'id': task-4256710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16927} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.059754] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256711, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.060535] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.060679] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1190.060871] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1190.061090] env[61545]: INFO nova.compute.manager [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1190.061405] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.061626] env[61545]: DEBUG nova.compute.manager [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1190.061765] env[61545]: DEBUG nova.network.neutron [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1190.064656] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1190.064656] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f230e-58d4-b366-8185-8f5c2155b59e" [ 1190.064656] env[61545]: _type = "Task" [ 1190.064656] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.073961] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f230e-58d4-b366-8185-8f5c2155b59e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.196434] env[61545]: DEBUG oslo_vmware.api [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256708, 'name': PowerOnVM_Task, 'duration_secs': 0.554679} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.196776] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.196911] env[61545]: INFO nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Took 7.84 seconds to spawn the instance on the hypervisor. [ 1190.197117] env[61545]: DEBUG nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.198035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78b5485-5a39-4fc2-bd65-f4875ad86684 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.297294] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.770s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.355811] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.510327] env[61545]: DEBUG nova.compute.manager [req-9707cf33-f28a-4192-a189-9783717c55c2 req-451b82a2-b779-45e3-bb0d-9772842c9c29 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-vif-deleted-4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1190.510593] env[61545]: INFO nova.compute.manager [req-9707cf33-f28a-4192-a189-9783717c55c2 req-451b82a2-b779-45e3-bb0d-9772842c9c29 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Neutron deleted interface 4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46; detaching it from the instance and deleting it from the info cache [ 1190.510791] env[61545]: DEBUG nova.network.neutron [req-9707cf33-f28a-4192-a189-9783717c55c2 req-451b82a2-b779-45e3-bb0d-9772842c9c29 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] 
Updating instance_info_cache with network_info: [{"id": "84ae420f-63eb-44ec-82d6-f35d63f23506", "address": "fa:16:3e:d6:2e:dd", "network": {"id": "2b26bdb4-bedd-49b7-a482-3f58b14f98c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-514736112", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.21", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c29495610a5f46a39670abf9a34ca73a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ae420f-63", "ovs_interfaceid": "84ae420f-63eb-44ec-82d6-f35d63f23506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.514709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-2129a1eb-4ad7-42ef-9554-6202f7a44f58" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.515859] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9316db2e-da60-48d3-8cb3-d1ed11e1fbac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.524345] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f137529-6139-4e87-bfc0-a02486b9dea0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.552452] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256711, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.576426] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524f230e-58d4-b366-8185-8f5c2155b59e, 'name': SearchDatastore_Task, 'duration_secs': 0.011729} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.576745] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.576992] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1190.577284] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7629010a-eb32-45bf-bf99-a9cd17611c97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.585820] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1190.585820] env[61545]: value = "task-4256712" [ 1190.585820] env[61545]: _type = "Task" [ 1190.585820] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.594628] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.716911] env[61545]: INFO nova.compute.manager [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Took 13.77 seconds to build instance. 
[ 1191.013702] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ea1950a-0b20-492d-8a96-ddca867f9850 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.026199] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093fe41c-86cf-4e8c-b84d-330cf9948e82 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.058241] env[61545]: DEBUG nova.network.neutron [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.060084] env[61545]: DEBUG nova.compute.manager [req-9707cf33-f28a-4192-a189-9783717c55c2 req-451b82a2-b779-45e3-bb0d-9772842c9c29 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Detach interface failed, port_id=4c4e5fb5-9ff7-4a97-9cc6-f23a09acbd46, reason: Instance 6ea419eb-a171-4e79-868f-25851fde8a8b could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1191.065605] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256711, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.097350] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256712, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.219011] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f8e18fad-8404-46cf-935c-21a80a18aaf6 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.278s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.430698] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.430943] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.431234] env[61545]: INFO nova.compute.manager [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attaching volume 5e8021d6-c42a-495c-b751-a9d5a4018b58 to /dev/sdb [ 1191.464061] env[61545]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41ca451-04da-4981-8da3-24f28d2cbaf1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.471380] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66061a2b-204a-4f77-aef4-54ccc59bc0c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.486501] env[61545]: DEBUG nova.virt.block_device [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating existing volume attachment record: aa71ed99-88d4-4ff7-8c12-48e04d700ca1 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1191.554524] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256711, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.562148] env[61545]: INFO nova.compute.manager [-] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Took 1.50 seconds to deallocate network for instance. [ 1191.595999] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534032} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.595999] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1191.596557] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1191.596846] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65059eb5-3370-4e3e-a98f-d03e8e266a25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.604982] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1191.604982] env[61545]: value = "task-4256713" [ 1191.604982] env[61545]: _type = "Task" [ 1191.604982] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.614100] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256713, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.625415] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.625760] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51bb95b2-c6f7-44fb-b000-71f00b18fd30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.633182] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1191.633182] env[61545]: value = "task-4256714" [ 1191.633182] env[61545]: _type = "Task" [ 1191.633182] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.645280] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.844574] env[61545]: INFO nova.compute.manager [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Swapping old allocation on dict_keys(['7015027d-c4e1-4938-ac31-6e4672774d7e']) held by migration 3a035504-5737-4891-8650-0538e13b5dea for instance [ 1191.872713] env[61545]: DEBUG nova.scheduler.client.report [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Overwriting current allocation {'allocations': {'7015027d-c4e1-4938-ac31-6e4672774d7e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 149}}, 'project_id': 'aa2360863a5f4eff8a88eca0c88fa76d', 'user_id': 'cb00c18cd27541359ae0adf45f5c4171', 'consumer_generation': 1} on consumer 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 {{(pid=61545) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1191.988330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.988552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.988793] env[61545]: DEBUG nova.network.neutron [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.057071] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256711, 'name': CreateVM_Task, 'duration_secs': 1.521066} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.057071] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1192.057277] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.057468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.057860] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1192.058186] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4f0d8cf-5875-4db3-a70f-f97a1ddb8931 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.063574] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1192.063574] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bbafdd-0f74-6c6c-3cab-6378ce98ca43" [ 1192.063574] env[61545]: _type = "Task" [ 1192.063574] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.072842] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.072937] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.073193] env[61545]: DEBUG nova.objects.instance [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lazy-loading 'resources' on Instance uuid 6ea419eb-a171-4e79-868f-25851fde8a8b {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.074194] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bbafdd-0f74-6c6c-3cab-6378ce98ca43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.115680] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091361} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.115974] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.116842] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce59d5e-f2e1-4996-a87e-bb5469208027 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.140811] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.141275] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4eda0837-5385-4802-a2f3-d16c9a1b20a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.165054] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256714, 'name': PowerOffVM_Task, 'duration_secs': 0.26651} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.166507] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1192.167261] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1192.167493] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1192.167649] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1192.167831] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1192.167975] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1192.168154] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1192.168364] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1192.168523] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1192.168689] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1192.168851] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1192.169036] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1192.174286] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1192.174286] env[61545]: value = "task-4256716" [ 1192.174286] env[61545]: _type = "Task" [ 1192.174286] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.174501] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d190fe77-cc0d-49fb-bfea-8bc9043435c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.195065] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1192.195065] env[61545]: value = "task-4256717" [ 1192.195065] env[61545]: _type = "Task" [ 1192.195065] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.204011] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256717, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.505964] env[61545]: DEBUG nova.compute.manager [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1192.551306] env[61545]: DEBUG nova.compute.manager [req-d0319c63-43cb-45ce-8cef-6e86e717572e req-c3b5680c-aa32-4d86-a5bb-4ecc52151596 service nova] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Received event network-vif-deleted-84ae420f-63eb-44ec-82d6-f35d63f23506 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1192.578785] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52bbafdd-0f74-6c6c-3cab-6378ce98ca43, 'name': SearchDatastore_Task, 'duration_secs': 0.012603} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.579578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.579814] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.580153] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.580251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.580414] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.580685] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29b15198-568d-42b4-8ffa-fcef6a2bf790 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.603146] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.603146] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.606735] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7249cc4-dc1f-4244-8078-b929ad53fa36 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.610948] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1192.610948] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282db14-7da9-84b6-f870-195111cab340" [ 1192.610948] env[61545]: _type = "Task" [ 1192.610948] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.624538] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282db14-7da9-84b6-f870-195111cab340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.702537] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256716, 'name': ReconfigVM_Task, 'duration_secs': 0.430642} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.703235] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to attach disk [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478/c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1192.704524] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'boot_index': 0, 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'guest_format': None, 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'image_id': 'bf68eb43-6d66-4532-9eb1-af7d78faa698'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'attachment_id': '287d2807-863f-4f13-8a9b-bf930498ef1d', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'}, 'guest_format': None, 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=61545) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1192.704749] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1192.705099] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1192.708916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d625ece3-e567-4a25-a970-c8966f89505e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.711801] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256717, 'name': ReconfigVM_Task, 'duration_secs': 0.229061} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.715580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9940576-726e-4c98-9f42-9c06084a8529 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.734613] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da1dd4d-4b82-4f9b-b1b5-d2bbaf8516ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.753402] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1192.753640] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1192.753799] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1192.753983] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1192.754148] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1192.754301] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1192.754505] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1192.754662] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1192.754827] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1192.754988] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1192.755216] env[61545]: DEBUG nova.virt.hardware [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1192.758932] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09f1f90a-20f7-43e0-9613-b4b17bbd8bca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.781349] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d 
to attach disk [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.784996] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c909bf92-ccf6-4686-b5c1-7cabf8068c53 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.802780] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1192.802780] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52db4e03-8c6b-7f63-2124-6375f84236ae" [ 1192.802780] env[61545]: _type = "Task" [ 1192.802780] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.808496] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1192.808496] env[61545]: value = "task-4256718" [ 1192.808496] env[61545]: _type = "Task" [ 1192.808496] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.815059] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52db4e03-8c6b-7f63-2124-6375f84236ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009961} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.821651] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1192.822454] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9988adb9-8032-4536-a7b1-cb5a583d79dd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.826284] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57db7c34-f540-476d-9dd2-417ba1d0133f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.847680] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.852395] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8032715-b9ad-40a3-8755-048c517acd4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.857581] env[61545]: DEBUG nova.network.neutron [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [{"id": "55458c7f-c486-49fb-966b-0478ed8948ee", "address": "fa:16:3e:e2:4c:9e", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55458c7f-c4", "ovs_interfaceid": "55458c7f-c486-49fb-966b-0478ed8948ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.858968] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1192.858968] env[61545]: value = "task-4256719" [ 1192.858968] env[61545]: _type = "Task" [ 1192.858968] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.891651] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643be9a2-345c-4f6d-b415-a8f780dd459c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.898486] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256719, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.904553] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc0d090-c887-49c6-a42b-19ab15960d60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.919919] env[61545]: DEBUG nova.compute.provider_tree [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.027055] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.121348] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282db14-7da9-84b6-f870-195111cab340, 'name': SearchDatastore_Task, 'duration_secs': 0.075216} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.122237] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf2632d-bc09-4bcf-a05d-899dac4c756d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.127786] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1193.127786] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522f3e95-a48c-a2af-891c-98e0072e21ca" [ 1193.127786] env[61545]: _type = "Task" [ 1193.127786] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.136065] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522f3e95-a48c-a2af-891c-98e0072e21ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.318916] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.360981] env[61545]: DEBUG oslo_concurrency.lockutils [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-12aed0d0-b5dd-4f1b-913a-000c06a8eab4" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.361662] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.365140] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-482bd884-c48a-4e9e-9553-21c0d6f9275c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.373160] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256719, 'name': ReconfigVM_Task, 'duration_secs': 0.25336} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.374680] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1193.375083] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1193.375083] env[61545]: value = "task-4256720" [ 1193.375083] env[61545]: _type = "Task" [ 1193.375083] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.375796] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354b8b3c-68f3-4639-88c4-8b11d8905d6e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.391441] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256720, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.414807] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.415194] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2fe8c58-7ccc-4f25-aa6c-c0ecaa1e3e44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.430234] env[61545]: DEBUG nova.scheduler.client.report [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.441221] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1193.441221] env[61545]: value = "task-4256721" [ 1193.441221] env[61545]: _type = "Task" [ 1193.441221] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.451772] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.640264] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522f3e95-a48c-a2af-891c-98e0072e21ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010844} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.640684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.640805] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5393730d-6a4b-418a-9047-4287f87c8d14/5393730d-6a4b-418a-9047-4287f87c8d14.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1193.641148] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1688bbd5-a90a-44e5-83a5-6b3eeb0a20e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.648361] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1193.648361] env[61545]: value = "task-4256722" [ 1193.648361] env[61545]: _type = "Task" [ 1193.648361] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.657261] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256722, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.819952] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256718, 'name': ReconfigVM_Task, 'duration_secs': 0.771895} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.820305] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.825465] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58688389-d5ac-4e5f-a1b8-6b045d27070d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.842102] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1193.842102] env[61545]: value = "task-4256724" [ 1193.842102] env[61545]: _type = "Task" [ 1193.842102] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.851717] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256724, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.889037] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256720, 'name': PowerOffVM_Task, 'duration_secs': 0.289292} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.889195] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.889897] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.890175] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.890378] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.890575] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.890734] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.890891] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.891160] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.891353] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 
tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.891527] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.891691] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.891869] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.898847] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66bc2d85-b522-461a-83dd-9b393b831b1d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.916180] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1193.916180] env[61545]: value = "task-4256725" [ 1193.916180] env[61545]: _type = "Task" [ 1193.916180] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.929616] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.936024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.939699] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.912s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.953082] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256721, 'name': ReconfigVM_Task, 'duration_secs': 0.466968} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.953759] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58/2129a1eb-4ad7-42ef-9554-6202f7a44f58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.954649] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc953a4-1aac-4ecc-86fa-05c7b2211e76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.980083] env[61545]: INFO nova.scheduler.client.report [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Deleted allocations for instance 6ea419eb-a171-4e79-868f-25851fde8a8b [ 1193.981798] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94980c53-9cc3-4852-b8ab-0745cdc4ba8f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.014448] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e830c86e-e4b7-48d8-b728-5421be728d9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.037812] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe10a4e6-d892-41ee-81f2-6254e6076439 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.047704] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1194.048078] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7465ea08-8861-4b38-8da8-98cf52a64100 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.055679] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1194.055679] env[61545]: value = "task-4256726" [ 1194.055679] env[61545]: _type = "Task" [ 1194.055679] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.066461] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256726, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.159877] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256722, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.352235] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.427274] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256725, 'name': ReconfigVM_Task, 'duration_secs': 0.376188} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.428198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3a4a10-7da9-48c3-a33d-af1b90bed2b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.448908] env[61545]: INFO nova.compute.claims [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.453168] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.453420] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.453575] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.453754] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor 
pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.453899] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.454059] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.454267] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.454426] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.454590] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.454749] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.454918] env[61545]: DEBUG nova.virt.hardware [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.456045] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96d1b74d-0b57-4a09-af34-d472632f638c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.462478] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1194.462478] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec12bf-6961-062f-0f8f-1edb290d7bc3" [ 1194.462478] env[61545]: _type = "Task" [ 1194.462478] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.470998] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec12bf-6961-062f-0f8f-1edb290d7bc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.491687] env[61545]: DEBUG oslo_concurrency.lockutils [None req-58634b01-fc2d-4747-9fee-af0ee3b0409f tempest-ServersTestMultiNic-619370263 tempest-ServersTestMultiNic-619370263-project-member] Lock "6ea419eb-a171-4e79-868f-25851fde8a8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.119s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.566213] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256726, 'name': PowerOnVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.630143] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.630445] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.630659] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.630840] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.631010] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.631166] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.632018] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.632018] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None 
None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1194.632018] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.659873] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553202} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.660248] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5393730d-6a4b-418a-9047-4287f87c8d14/5393730d-6a4b-418a-9047-4287f87c8d14.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1194.660490] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.660690] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2e22523-53b5-4860-8c0a-a9d9ec19c3f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.668342] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1194.668342] env[61545]: value = "task-4256727" [ 1194.668342] env[61545]: _type = "Task" [ 1194.668342] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.678101] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.853813] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.959332] env[61545]: INFO nova.compute.resource_tracker [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating resource usage from migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 [ 1194.978722] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ec12bf-6961-062f-0f8f-1edb290d7bc3, 'name': SearchDatastore_Task, 'duration_secs': 0.018816} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.984524] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1194.987944] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9552f10-a7b4-433e-ab75-6610255715e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.008350] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1195.008350] env[61545]: value = "task-4256728" [ 1195.008350] env[61545]: _type = "Task" [ 1195.008350] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.017881] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256728, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.070378] env[61545]: DEBUG oslo_vmware.api [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256726, 'name': PowerOnVM_Task, 'duration_secs': 0.584487} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.070691] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1195.134476] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.145048] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911fcca9-cc1b-44bb-b50d-91e60b50b114 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.153725] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4f8ce4-b7eb-4c98-a355-c8df04a0d092 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.188726] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c99c23a-4294-4a5a-bfb5-ceefc8ba2e6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.196576] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071203} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.200436] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1195.200436] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1c8a35-4e85-4d3a-8cd1-b5d32ebef0e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.203071] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f830d2-0e8f-4c05-aa03-66b9af0af251 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.227385] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 5393730d-6a4b-418a-9047-4287f87c8d14/5393730d-6a4b-418a-9047-4287f87c8d14.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.236129] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7426c20-6839-4baa-ad92-a46b1cdfc2c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.251040] env[61545]: DEBUG nova.compute.provider_tree [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.260108] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1195.260108] env[61545]: value = "task-4256729" [ 1195.260108] env[61545]: _type = "Task" [ 1195.260108] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.271610] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256729, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.357753] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256724, 'name': ReconfigVM_Task, 'duration_secs': 1.150765} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.358154] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1195.358945] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06067100-b928-428c-8e55-6ffc8289c389 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.366040] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1195.366040] env[61545]: value = "task-4256730" [ 1195.366040] env[61545]: _type = "Task" [ 1195.366040] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.377974] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256730, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.519075] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256728, 'name': ReconfigVM_Task, 'duration_secs': 0.479721} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.519393] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1195.520254] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0489d0-5a8a-4c69-ac57-d6fe96a3883d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.542998] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.543351] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2395941a-97e7-42a1-8d35-7bf9659ed448 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.562128] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1195.562128] env[61545]: value = "task-4256731" [ 1195.562128] env[61545]: _type = "Task" [ 1195.562128] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.572970] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.754502] env[61545]: DEBUG nova.scheduler.client.report [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.770632] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256729, 'name': ReconfigVM_Task, 'duration_secs': 0.43308} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.770632] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 5393730d-6a4b-418a-9047-4287f87c8d14/5393730d-6a4b-418a-9047-4287f87c8d14.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.771150] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4322aec8-e0d3-4dde-bca4-c517b3051145 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.779189] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1195.779189] env[61545]: value = "task-4256732" [ 1195.779189] env[61545]: _type = "Task" [ 1195.779189] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.788734] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256732, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.852799] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.853100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.853463] env[61545]: DEBUG nova.objects.instance [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.879059] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256730, 'name': Rename_Task, 'duration_secs': 0.16687} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.879266] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.879405] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b0a08aa-a407-4214-b492-f56119a8153e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.888191] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1195.888191] env[61545]: value = "task-4256733" [ 1195.888191] env[61545]: _type = "Task" [ 1195.888191] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.896731] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.039355] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1196.039355] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838835', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'name': 'volume-5e8021d6-c42a-495c-b751-a9d5a4018b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'serial': '5e8021d6-c42a-495c-b751-a9d5a4018b58'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1196.040118] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceaf93ad-a7ae-480b-89be-7f1c22e85669 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.057137] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bb60fd-062c-4851-ba2e-371933db2a7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.087849] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] volume-5e8021d6-c42a-495c-b751-a9d5a4018b58/volume-5e8021d6-c42a-495c-b751-a9d5a4018b58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1196.092196] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af5ccd5-2929-4af8-9cc3-0b7ce0efacba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.109101] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256731, 'name': ReconfigVM_Task, 'duration_secs': 0.520025} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.109782] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4/12aed0d0-b5dd-4f1b-913a-000c06a8eab4.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.110708] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e781195-7ca8-4ea7-9967-60018793c189 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.114911] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1196.114911] env[61545]: value = "task-4256734" [ 1196.114911] env[61545]: _type = "Task" [ 1196.114911] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.135316] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd017ad-a9f6-4dc4-b8c2-13278fa7091c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.142758] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256734, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.144382] env[61545]: INFO nova.compute.manager [None req-7c8223f9-cc56-4954-b3f1-ce29e0104104 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance to original state: 'active' [ 1196.172032] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc2e4ca-47e8-46eb-b0b5-420a652ac5a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.192791] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6789bf00-99d2-44c7-89bf-c351b2a68b2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.201710] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.201980] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b03d309f-98f4-407a-85e2-ad633e4ce87e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.209274] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1196.209274] env[61545]: value = "task-4256735" [ 1196.209274] env[61545]: _type = "Task" [ 1196.209274] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.217964] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256735, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.261879] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.323s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.262047] env[61545]: INFO nova.compute.manager [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Migrating [ 1196.268657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.134s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.268843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.269009] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1196.273073] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c123e2de-b015-49a9-b4a6-417819b9a5dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.294293] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f928d102-641d-4b02-88f9-94a257fa1492 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.302238] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256732, 'name': Rename_Task, 'duration_secs': 0.15315} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.303774] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.304038] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-016a0975-acbd-460b-8492-1ca092f76c5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.315848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3814a4-b5a2-4d81-86fa-5b59a742d4b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.323325] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1196.323325] env[61545]: value = "task-4256736" [ 1196.323325] env[61545]: _type = "Task" [ 1196.323325] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.328960] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8c08a9-a60b-440d-aa93-6f5865667648 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.342827] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256736, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.372036] env[61545]: DEBUG nova.objects.instance [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.372984] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179780MB free_disk=245GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1196.373224] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.373552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.399033] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256733, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.626664] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256734, 'name': ReconfigVM_Task, 'duration_secs': 0.406741} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.626986] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfigured VM instance instance-00000060 to attach disk [datastore1] volume-5e8021d6-c42a-495c-b751-a9d5a4018b58/volume-5e8021d6-c42a-495c-b751-a9d5a4018b58.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.632013] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01c44df9-59bb-41cf-8bb9-c7833c6fa035 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.652354] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1196.652354] env[61545]: value = "task-4256737" [ 1196.652354] env[61545]: _type = "Task" [ 1196.652354] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.662566] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256737, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.721464] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256735, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.785853] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.785853] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.785853] env[61545]: DEBUG nova.network.neutron [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.838276] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256736, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.883851] env[61545]: DEBUG nova.objects.base [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance<3b4fd643-c536-4da9-b1a3-82cd74d24f3e> lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1196.884066] env[61545]: DEBUG nova.network.neutron [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1196.901600] env[61545]: DEBUG oslo_vmware.api [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256733, 'name': PowerOnVM_Task, 'duration_secs': 0.662946} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.902009] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.902267] env[61545]: DEBUG nova.compute.manager [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.903290] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea100b3a-dbd5-472b-91b6-36da00c93337 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.024815] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f732176e-60ad-41b2-9270-fc0fe4c68fde tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.172s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.163024] env[61545]: DEBUG oslo_vmware.api [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256737, 'name': ReconfigVM_Task, 'duration_secs': 0.182398} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.163697] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838835', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'name': 'volume-5e8021d6-c42a-495c-b751-a9d5a4018b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'serial': '5e8021d6-c42a-495c-b751-a9d5a4018b58'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1197.222584] env[61545]: DEBUG oslo_vmware.api [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256735, 'name': PowerOnVM_Task, 'duration_secs': 0.588161} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.222999] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.334944] env[61545]: DEBUG oslo_vmware.api [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256736, 'name': PowerOnVM_Task, 'duration_secs': 0.972503} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.335433] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.335692] env[61545]: INFO nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Took 10.63 seconds to spawn the instance on the hypervisor. [ 1197.335926] env[61545]: DEBUG nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.336786] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390e2662-ed99-4836-b6af-29c4d2c88185 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.389446] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 219981bd-04cd-4253-a15e-eebd4083bfa8 as it has an incoming, in-progress migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55. Migration status is pre-migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1197.390951] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating resource usage from migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 [ 1197.415425] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415425] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415425] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e0ae4965-42eb-4286-8cd9-a5c82426f1bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415425] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 3b4fd643-c536-4da9-b1a3-82cd74d24f3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415581] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415581] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2129a1eb-4ad7-42ef-9554-6202f7a44f58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415673] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.415901] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5393730d-6a4b-418a-9047-4287f87c8d14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.416036] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1197.416148] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 219981bd-04cd-4253-a15e-eebd4083bfa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1197.416434] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1197.416590] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=250GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1197.434296] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.610647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3e045d-3d5f-4ddc-8d0f-498c11050e23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.620972] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83f5e56-bf60-42ab-a9d1-6ebf5aa2af43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.659997] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9818717-808b-4980-9b0e-5b48f291d62b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.671827] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e70759-27b0-4f32-a4c6-9cf7825a15e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.686243] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.707258] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.707523] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.707790] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.707914] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.708098] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.710554] env[61545]: INFO nova.compute.manager [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Terminating instance [ 1197.859438] env[61545]: INFO nova.compute.manager [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Took 15.50 seconds to build instance. 
[ 1197.934301] env[61545]: DEBUG nova.network.neutron [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.189697] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.216943] env[61545]: DEBUG nova.objects.instance [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.223449] env[61545]: DEBUG nova.compute.manager [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1198.224251] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.226130] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d12adb49-1f18-47ca-a3bb-e842522ca8c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.234433] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1198.234433] env[61545]: value = "task-4256738" [ 1198.234433] env[61545]: _type = "Task" [ 1198.234433] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.241873] env[61545]: INFO nova.compute.manager [None req-370467a7-6ff8-4157-88a4-613aed0cf86d tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance to original state: 'active' [ 1198.253210] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.362542] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6904f097-9495-4161-a35c-ac1330e28f6c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.016s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.436844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.696847] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1198.698073] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.324s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.698073] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 
tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.263s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.698073] env[61545]: DEBUG nova.objects.instance [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1198.700779] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1198.700979] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Cleaning up deleted instances with incomplete migration {{(pid=61545) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11903}} [ 1198.726835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-18fa6a6e-245a-4b18-ac99-babc9ece40dd tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.296s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.753259] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256738, 'name': PowerOffVM_Task, 'duration_secs': 0.400431} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.754492] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.755078] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1198.755143] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838811', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'name': 'volume-627e30aa-7351-463d-9453-98a2cb96ea31', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2129a1eb-4ad7-42ef-9554-6202f7a44f58', 'attached_at': '2025-06-03T12:55:47.000000', 'detached_at': '', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'serial': '627e30aa-7351-463d-9453-98a2cb96ea31'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1198.760282] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52f61c3-4064-4617-9df8-c4b4281640f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.794058] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657c4441-e765-4721-9f26-d43bbb98c09c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.803469] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0a4c5b-c17a-4de9-98f1-00d80ffdce06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.825573] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4a961f-544d-4840-87bd-d71bbdfeaa21 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.842229] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] The volume has not been displaced from its original location: [datastore1] volume-627e30aa-7351-463d-9453-98a2cb96ea31/volume-627e30aa-7351-463d-9453-98a2cb96ea31.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1198.847594] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1198.847925] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73650d7b-a584-42d6-95b6-812b77e1c26f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.867580] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1198.867580] env[61545]: value = "task-4256739" [ 1198.867580] env[61545]: _type = "Task" [ 1198.867580] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.876982] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.918833] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.919253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.919587] env[61545]: DEBUG nova.objects.instance [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.003362] env[61545]: DEBUG nova.compute.manager [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Received event network-changed-ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1199.003473] env[61545]: DEBUG nova.compute.manager [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Refreshing instance network info cache due to event 
network-changed-ce048a59-c941-4a83-bbf9-29dfc46aae60. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1199.003711] env[61545]: DEBUG oslo_concurrency.lockutils [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] Acquiring lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.003884] env[61545]: DEBUG oslo_concurrency.lockutils [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] Acquired lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.004164] env[61545]: DEBUG nova.network.neutron [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Refreshing network info cache for port ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.379587] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256739, 'name': ReconfigVM_Task, 'duration_secs': 0.309251} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.379986] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1199.385950] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9a65ac5-4951-4e10-9a76-458440cfed3f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.406909] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1199.406909] env[61545]: value = "task-4256740" [ 1199.406909] env[61545]: _type = "Task" [ 1199.406909] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.419530] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256740, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.532566] env[61545]: DEBUG nova.objects.instance [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.717119] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e0fb3903-3fcc-4789-b2a0-5b2df52885f8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.924414] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256740, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.939170] env[61545]: DEBUG nova.network.neutron [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updated VIF entry in instance network info cache for port ce048a59-c941-4a83-bbf9-29dfc46aae60. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.939572] env[61545]: DEBUG nova.network.neutron [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updating instance_info_cache with network_info: [{"id": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "address": "fa:16:3e:c6:94:82", "network": {"id": "8e26fd19-34b0-4746-917e-5e80fcca887a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-202804360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeb51ace7650413b987be7ddd7490182", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10ff2092-e8eb-4768-ad4a-65a80560b447", "external-id": "nsx-vlan-transportzone-845", "segmentation_id": 845, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce048a59-c9", "ovs_interfaceid": "ce048a59-c941-4a83-bbf9-29dfc46aae60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.954390] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c589734-d094-42b5-afbe-9b50e7eaa847 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.974505] env[61545]: 
DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.008636] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.008913] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.034008] env[61545]: DEBUG nova.objects.base [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance<3b4fd643-c536-4da9-b1a3-82cd74d24f3e> lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1200.034305] env[61545]: DEBUG nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1200.073689] env[61545]: DEBUG nova.policy [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1200.280627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.281397] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.281604] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.281813] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.282011] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.284378] env[61545]: INFO nova.compute.manager [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Terminating instance [ 1200.396938] env[61545]: DEBUG nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Successfully created port: d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1200.419239] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256740, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.442703] env[61545]: DEBUG oslo_concurrency.lockutils [req-74f6fa32-2dcc-4d35-aab8-9ffa11080340 req-02100167-c379-41c0-be20-9bcca448be88 service nova] Releasing lock "refresh_cache-5393730d-6a4b-418a-9047-4287f87c8d14" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.484902] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.485172] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28660caa-65a8-4e9a-a6fe-f3add8ad72f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.494132] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1200.494132] env[61545]: value = "task-4256741" [ 1200.494132] env[61545]: _type = "Task" [ 1200.494132] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.505011] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256741, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.513883] env[61545]: DEBUG nova.compute.utils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1200.789731] env[61545]: DEBUG nova.compute.manager [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1200.789928] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.791395] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff38c7-738c-4e50-b870-8b93fe4304fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.799718] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.800018] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b59102ad-2b0c-434e-bb8f-f461006b3654 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.810622] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1200.810622] env[61545]: value = "task-4256742" [ 1200.810622] env[61545]: _type = "Task" [ 1200.810622] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.820272] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.919914] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256740, 'name': ReconfigVM_Task, 'duration_secs': 1.419248} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.920312] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838811', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'name': 'volume-627e30aa-7351-463d-9453-98a2cb96ea31', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '2129a1eb-4ad7-42ef-9554-6202f7a44f58', 'attached_at': '2025-06-03T12:55:47.000000', 'detached_at': '', 'volume_id': '627e30aa-7351-463d-9453-98a2cb96ea31', 'serial': '627e30aa-7351-463d-9453-98a2cb96ea31'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1200.920617] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.921546] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8ad458-f5d1-4168-891a-afe999165286 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.930209] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1200.930527] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf084240-28db-4242-b289-04b0cbff32b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.006278] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256741, 'name': PowerOffVM_Task, 'duration_secs': 0.483735} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.006628] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.006853] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.012712] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.013059] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.013269] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleting the datastore file [datastore2] 2129a1eb-4ad7-42ef-9554-6202f7a44f58 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.013899] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ffe084a-d8c6-45f9-81bd-f3411f24d669 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.018598] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.021706] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1201.021706] env[61545]: value = "task-4256744" [ 1201.021706] env[61545]: _type = "Task" [ 1201.021706] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.031965] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.323636] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256742, 'name': PowerOffVM_Task, 'duration_secs': 0.231733} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.323922] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.324112] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.324380] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4e766e3-aa87-4ee9-8f26-74b0fa7c6b8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.389978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.389978] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.389978] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleting the datastore file [datastore2] 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.389978] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3760cbe9-ca9a-446f-8107-886be58e7187 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.398212] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1201.398212] env[61545]: value = "task-4256746" [ 1201.398212] env[61545]: _type = "Task" [ 1201.398212] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.407590] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256746, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.517165] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.517449] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.517583] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.517768] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.517916] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.518086] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.518296] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.518457] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 
tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.518623] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.518796] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.520773] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.524248] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4464a42-988a-49cf-a4fd-4d26ac9ad77c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.543923] env[61545]: DEBUG oslo_vmware.api [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158887} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.545411] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.546040] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1201.546040] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1201.546040] env[61545]: INFO nova.compute.manager [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Took 3.32 seconds to destroy the instance on the hypervisor. 
[ 1201.546291] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1201.546526] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1201.546526] env[61545]: value = "task-4256747" [ 1201.546526] env[61545]: _type = "Task" [ 1201.546526] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.547652] env[61545]: DEBUG nova.compute.manager [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1201.547652] env[61545]: DEBUG nova.network.neutron [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1201.558775] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256747, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.700645] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.700913] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.701098] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.701258] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.701402] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1201.701564] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.913983] env[61545]: DEBUG oslo_vmware.api [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160249} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.914304] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.914555] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1201.914715] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1201.914895] env[61545]: INFO nova.compute.manager [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1201.915278] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1201.915492] env[61545]: DEBUG nova.compute.manager [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1201.915573] env[61545]: DEBUG nova.network.neutron [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.060920] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256747, 'name': ReconfigVM_Task, 'duration_secs': 0.345397} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.061291] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.095133] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.095405] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.095650] env[61545]: INFO nova.compute.manager [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attaching volume c823d33f-da1f-4f2b-bda1-6e5c472699ea to /dev/sdc [ 1202.132360] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c412a83c-37d9-486c-994c-19f029c82313 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.142103] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158dcca8-7352-4e79-8f49-8804a3c7d219 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.158263] env[61545]: DEBUG nova.virt.block_device [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating existing volume attachment record: efad9a23-ff80-48df-bff7-b2ca27ff63c3 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1202.205195] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.205459] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.205661] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.205798] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1202.206718] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45060a10-49dd-4db6-be4a-8904ae7f258b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.215773] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9253bd65-7297-482f-8462-7b17a98a49e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.231833] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0c7b02-ba5f-4cc1-b93f-1e2d13e2b53b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.239641] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1fa427-7cb1-484d-8741-ab17b6778688 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.272632] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179780MB free_disk=245GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1202.272866] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.273088] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.485541] env[61545]: DEBUG nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Successfully updated port: d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.570589] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.570875] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.571972] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.573171] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.573347] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.573502] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.573715] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.573893] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.574064] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.574239] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.574423] env[61545]: DEBUG nova.virt.hardware [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.581146] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1202.581946] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b0e50d0-3ae9-4e9a-8f4b-28ea1dd08172 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.616457] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1202.616457] env[61545]: value = "task-4256749" [ 1202.616457] env[61545]: _type = "Task" [ 1202.616457] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.629019] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256749, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.958859] env[61545]: DEBUG nova.compute.manager [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-plugged-d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1202.959190] env[61545]: DEBUG oslo_concurrency.lockutils [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.959431] env[61545]: DEBUG oslo_concurrency.lockutils [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.959604] env[61545]: DEBUG oslo_concurrency.lockutils [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.959780] env[61545]: DEBUG nova.compute.manager [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] No waiting events found dispatching network-vif-plugged-d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1202.960370] env[61545]: WARNING nova.compute.manager [req-207be072-5fd4-431f-be12-f1681608f0d3 req-3ab4ef59-efb7-49f7-843b-7043c20555cf service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received unexpected event network-vif-plugged-d2fa1081-8512-43d3-ba49-d739e11278fe for instance with vm_state active and task_state None. 
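The ReconfigVM_Task records above (create the task, then poll until "completed successfully") follow the usual oslo.vmware pattern: invoke_api() starts the vCenter task and returns its moref, and wait_for_task() polls it, emitting the "progress is N%" lines, until it finishes. Below is a minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, VM moref and the (empty) reconfigure spec are placeholders, not values taken from this log or from Nova's driver code.

# Sketch only: placeholders throughout, not Nova's vmwareapi driver code.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',       # placeholder vCenter endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)

# Managed object reference of the VM to reconfigure (placeholder id).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Build an empty VirtualMachineConfigSpec via the SOAP client factory.
spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')

# invoke_api() issues ReconfigVM_Task and returns the task moref;
# wait_for_task() polls it (the "progress is N%" records above) until it
# reaches the 'success' state, or raises if the task errors out.
task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
task_info = session.wait_for_task(task)
print(task_info.state)
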
[ 1202.989977] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.990206] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.990394] env[61545]: DEBUG nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.114569] env[61545]: DEBUG nova.network.neutron [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.127915] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256749, 'name': ReconfigVM_Task, 'duration_secs': 0.19297} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.129490] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1203.131239] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff06f9d-431f-440d-9560-2b5cfba50b14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.158467] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.159597] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53391596-c9da-45c9-a131-92fc3da2005c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.181599] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1203.181599] env[61545]: value = "task-4256750" [ 1203.181599] 
env[61545]: _type = "Task" [ 1203.181599] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.190146] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256750, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.291243] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 219981bd-04cd-4253-a15e-eebd4083bfa8 as it has an incoming, in-progress migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55. Migration status is migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1203.292593] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating resource usage from migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 [ 1203.302040] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.302312] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.345812] env[61545]: DEBUG nova.network.neutron [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.422025] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422174] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422399] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e0ae4965-42eb-4286-8cd9-a5c82426f1bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422399] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 3b4fd643-c536-4da9-b1a3-82cd74d24f3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422516] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422670] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 2129a1eb-4ad7-42ef-9554-6202f7a44f58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422791] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.422905] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5393730d-6a4b-418a-9047-4287f87c8d14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.423029] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1203.423145] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 219981bd-04cd-4253-a15e-eebd4083bfa8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1203.535107] env[61545]: WARNING nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. ignoring it [ 1203.623450] env[61545]: INFO nova.compute.manager [-] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Took 2.08 seconds to deallocate network for instance. 
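The paired 'Acquiring lock "compute_resources" ... / acquired ... waited / released ... held' records throughout this audit come from oslo.concurrency's lockutils wrappers around the resource tracker methods. A minimal sketch of the two usual forms of that API follows; the lock name matches the log, but the guarded function and its payload are illustrative, not Nova's actual code.

# Sketch only: the lock name matches the log, the guarded work is illustrative.
from oslo_concurrency import lockutils

# Decorator form: the body runs while holding the named lock, and lockutils
# logs the 'acquired ... waited N.NNNs' / 'released ... held N.NNNs' DEBUG lines.
@lockutils.synchronized('compute_resources')
def audit_resources():
    return {'total_vcpus': 48, 'free_ram_mb': 179780}   # illustrative values

# Context-manager form of the same named (in-process) lock.
with lockutils.lock('compute_resources'):
    snapshot = {'free_disk_gb': 245}                    # work done under the lock

print(audit_resources(), snapshot)
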
[ 1203.690963] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256750, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.806872] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1203.850399] env[61545]: INFO nova.compute.manager [-] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Took 1.93 seconds to deallocate network for instance. [ 1203.855038] env[61545]: DEBUG nova.network.neutron [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d2fa1081-8512-43d3-ba49-d739e11278fe", "address": "fa:16:3e:a4:7e:3e", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2fa1081-85", "ovs_interfaceid": "d2fa1081-8512-43d3-ba49-d739e11278fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.926887] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9a59f45b-727f-45ea-ad33-64fa23aaffe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1203.926887] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1203.927052] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=250GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1204.079213] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e2c7dc-4444-4c89-b84f-5f9286cc2c65 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.087382] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b044bae-aa98-452c-846d-be5e6fb78881 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.118024] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf780fd-f08f-4e43-9e75-324d88d80fee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.125934] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7589b810-a5a2-4169-8d08-f809f9815dfd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.141573] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.173399] env[61545]: INFO nova.compute.manager [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Took 0.55 seconds to detach 1 volumes for instance. [ 1204.190530] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256750, 'name': ReconfigVM_Task, 'duration_secs': 0.721851} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.190866] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8/219981bd-04cd-4253-a15e-eebd4083bfa8.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.191203] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1204.327684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.361174] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.361403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.361577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.362432] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.363358] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b454e9e-68f5-4efb-9dce-1604df0e89d2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.381459] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.381459] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.381687] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.381687] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.381865] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.382055] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.382290] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.383770] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.383770] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.383770] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.383770] env[61545]: DEBUG nova.virt.hardware [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.389539] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfiguring VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1204.389633] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecd206c5-7beb-4e55-bad1-64a34396c892 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.408980] env[61545]: DEBUG oslo_vmware.api [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1204.408980] env[61545]: value = "task-4256751" [ 1204.408980] env[61545]: _type = "Task" [ 1204.408980] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.418741] env[61545]: DEBUG oslo_vmware.api [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256751, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.645361] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.679742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.698569] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a19fb5-3b94-4de7-9abe-8c18e1347227 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.719437] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb4d559-c76c-4f68-8609-d908e0fef793 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.738287] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1204.919803] env[61545]: DEBUG oslo_vmware.api [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.079643] env[61545]: DEBUG nova.compute.manager [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-changed-d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1205.079837] env[61545]: DEBUG nova.compute.manager [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing instance network info cache due to event network-changed-d2fa1081-8512-43d3-ba49-d739e11278fe. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1205.080068] env[61545]: DEBUG oslo_concurrency.lockutils [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.080354] env[61545]: DEBUG oslo_concurrency.lockutils [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.080414] env[61545]: DEBUG nova.network.neutron [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing network info cache for port d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.150942] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1205.151324] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.878s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.151464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.824s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.153305] env[61545]: INFO nova.compute.claims [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1205.155847] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.155995] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Cleaning up deleted instances {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11865}} [ 1205.281028] env[61545]: DEBUG nova.network.neutron [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Port f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1205.371551] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 
tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.371824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.372014] env[61545]: INFO nova.compute.manager [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Shelving [ 1205.419557] env[61545]: DEBUG oslo_vmware.api [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256751, 'name': ReconfigVM_Task, 'duration_secs': 0.737981} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.420019] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.420308] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfigured VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1205.674258] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] There are 58 instances to clean {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11874}} [ 1205.674459] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 6ea419eb-a171-4e79-868f-25851fde8a8b] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1205.793980] env[61545]: DEBUG nova.network.neutron [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updated VIF entry in instance network info cache for port d2fa1081-8512-43d3-ba49-d739e11278fe. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.794405] env[61545]: DEBUG nova.network.neutron [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d2fa1081-8512-43d3-ba49-d739e11278fe", "address": "fa:16:3e:a4:7e:3e", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2fa1081-85", "ovs_interfaceid": "d2fa1081-8512-43d3-ba49-d739e11278fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.834580] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85317c1d-089f-4fd8-a4a2-02992537e1b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.843399] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd58710-eb88-4604-aade-6d91c1787e6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.880815] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29a77be1-e04c-4a5f-896f-3d1cb157cd7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.890993] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cf4251-d1d8-4781-b9d2-6cba968f6815 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.906816] env[61545]: DEBUG nova.compute.provider_tree [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.924522] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8d89bf55-9cf5-4e42-992e-44a9f479a848 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.005s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.184107] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 00c4a77a-e049-4511-95c9-e4b6596490c5] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1206.296756] env[61545]: DEBUG oslo_concurrency.lockutils [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.297540] env[61545]: DEBUG nova.compute.manager [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 2129a1eb-4ad7-42ef-9554-6202f7a44f58] Received event network-vif-deleted-b793a557-dfec-4ca2-b8b7-9c9c6fcec8cc {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1206.297540] env[61545]: DEBUG nova.compute.manager [req-f6c1e716-4dcc-4de4-8a2a-4c034b7f7501 req-39dee3c2-290a-4264-a345-78f7421c4530 service nova] [instance: 12aed0d0-b5dd-4f1b-913a-000c06a8eab4] Received event network-vif-deleted-55458c7f-c486-49fb-966b-0478ed8948ee {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1206.303569] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.303785] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.304171] 
env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.385952] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.386361] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd93041a-df8e-4e6c-950c-7a6362fca908 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.394924] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1206.394924] env[61545]: value = "task-4256753" [ 1206.394924] env[61545]: _type = "Task" [ 1206.394924] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.405499] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.410034] env[61545]: DEBUG nova.scheduler.client.report [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.687253] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: a5ec3957-4646-4de4-8eac-9f0fbbf8da52] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1206.704625] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1206.704873] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838836', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'name': 'volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'serial': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1206.705782] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40127473-40c1-458d-9639-9c95198e8791 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.723291] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1290b222-1806-42eb-ac3e-b07504936bcd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.752068] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea/volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.752349] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b01d9a1-0a56-47b5-9f57-e5763761abfc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.771391] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1206.771391] env[61545]: value = "task-4256754" [ 1206.771391] env[61545]: _type = "Task" [ 1206.771391] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.779682] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256754, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.906075] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256753, 'name': PowerOffVM_Task, 'duration_secs': 0.263624} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.906378] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1206.907406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca39205-72d4-478e-bf58-40ee82759305 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.929810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.778s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.930377] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1206.933384] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.571s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.934014] env[61545]: DEBUG nova.objects.instance [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'resources' on Instance uuid 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.936042] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb722a4-0de1-4fce-88c1-995d19f0a2bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.190988] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: f2975097-29a3-46cc-9dea-0c414baff246] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1207.281058] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256754, 'name': ReconfigVM_Task, 'duration_secs': 0.382696} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.281561] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfigured VM instance instance-00000060 to attach disk [datastore1] volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea/volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.286515] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a6029fa-c744-4026-9c07-b9b052e4fb0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.303190] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1207.303190] env[61545]: value = "task-4256755" [ 1207.303190] env[61545]: _type = "Task" [ 1207.303190] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.317354] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256755, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.363362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.363568] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.363748] env[61545]: DEBUG nova.network.neutron [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.443943] env[61545]: DEBUG nova.compute.utils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1207.445638] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1207.445810] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1207.448452] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1207.448744] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-beabf8ac-41a1-41f8-ab76-e53df05b038d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.457669] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1207.457669] env[61545]: value = "task-4256756" [ 1207.457669] env[61545]: _type = "Task" [ 1207.457669] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.471805] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256756, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.537102] env[61545]: DEBUG nova.policy [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1207.632587] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a5359f-667d-4c5f-aa22-342a349d9fa1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.642414] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141da779-df8e-40d2-b61f-f0559890e27b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.676644] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6fac56-159b-4be1-93e1-0dcdbd6ae930 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.686908] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a541aea5-9d61-4410-8755-cef2a89acabf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.693646] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 4f713be2-4c38-413b-874d-a39a4c01a1be] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1207.704890] env[61545]: DEBUG nova.compute.provider_tree [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.813604] env[61545]: DEBUG oslo_vmware.api [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256755, 'name': ReconfigVM_Task, 'duration_secs': 0.140086} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.814159] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838836', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'name': 'volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'serial': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1207.875266] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Successfully created port: dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1207.948587] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1207.968051] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256756, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.131486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-189f9616-2a3c-4fd7-964c-63b93549ef08" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.131758] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-189f9616-2a3c-4fd7-964c-63b93549ef08" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.132141] env[61545]: DEBUG nova.objects.instance [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.201705] env[61545]: DEBUG nova.network.neutron [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.209022] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 413f3b55-0db1-4331-b19f-5cd6c4eeb48a] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1208.209791] env[61545]: DEBUG nova.scheduler.client.report [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not 
changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.468009] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256756, 'name': CreateSnapshot_Task, 'duration_secs': 0.863372} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.468300] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1208.469085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354ef195-e4ab-45d2-a919-f2ee452d6d57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.707164] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.713482] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: ab1779b4-707e-4bd8-adea-940805654e1a] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1208.715444] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.782s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.718470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.038s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.718470] env[61545]: DEBUG nova.objects.instance [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'resources' on Instance uuid 2129a1eb-4ad7-42ef-9554-6202f7a44f58 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.748664] env[61545]: INFO nova.scheduler.client.report [None 
req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted allocations for instance 12aed0d0-b5dd-4f1b-913a-000c06a8eab4 [ 1208.857543] env[61545]: DEBUG nova.objects.instance [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.958036] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1208.986283] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1208.986865] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8f1aa59e-0dce-4e1b-ab62-1b70bd373c18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.995710] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.995961] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.996171] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.996378] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.996519] env[61545]: 
DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.996668] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.996885] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.997057] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.997277] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.997450] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.997620] env[61545]: DEBUG nova.virt.hardware [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.998553] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65852ef9-f622-42e4-b414-a1ef97199c87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.002682] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1209.002682] env[61545]: value = "task-4256757" [ 1209.002682] env[61545]: _type = "Task" [ 1209.002682] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.013111] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fe964d-e660-4be6-9ced-f93980eafd57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.021256] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256757, 'name': CloneVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.049546] env[61545]: DEBUG nova.objects.instance [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.220120] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: c61ca5f4-78ae-4626-977d-8c17dc12c012] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1209.239562] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f307ed94-76fc-4f92-9581-520f6bc79d58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.267153] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6007120-41dd-423d-b78b-463aac9ea444 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.270451] env[61545]: DEBUG oslo_concurrency.lockutils [None req-eb09b491-0fdd-4153-b806-4dc906a12256 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "12aed0d0-b5dd-4f1b-913a-000c06a8eab4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.989s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.277357] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1209.366926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-967872e9-5cba-4b59-88d4-c967cfccff37 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.271s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.380727] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620de7cc-24f2-4e86-a179-ba02a552f2ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1209.392122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02e0479-f550-4bb7-8a5d-b848f7c95032 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.424841] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba1c6a2-9466-4bc8-a005-6d9ff9f3e565 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.434832] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd630f5f-2421-4869-ba2b-116093aa9300 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.450152] env[61545]: DEBUG nova.compute.provider_tree [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.514150] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256757, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.552324] env[61545]: DEBUG nova.objects.base [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance<3b4fd643-c536-4da9-b1a3-82cd74d24f3e> lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1209.552603] env[61545]: DEBUG nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1209.671149] env[61545]: DEBUG nova.policy [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1209.729033] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: a7967300-6760-4310-bf48-00ddcaac3ee8] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1209.786955] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 
219981bd-04cd-4253-a15e-eebd4083bfa8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.787317] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abc9bc44-0457-4c13-a50b-f6f96b2d212f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.796730] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1209.796730] env[61545]: value = "task-4256758" [ 1209.796730] env[61545]: _type = "Task" [ 1209.796730] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.806066] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.917021] env[61545]: DEBUG nova.compute.manager [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Received event network-vif-plugged-dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1209.917156] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] Acquiring lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.917492] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.917769] env[61545]: DEBUG oslo_concurrency.lockutils [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.917960] env[61545]: DEBUG nova.compute.manager [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] No waiting events found dispatching network-vif-plugged-dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1209.918270] env[61545]: WARNING nova.compute.manager [req-5ce2ff29-f50a-4b64-b550-d1e54282e02a req-fc69c27d-cd05-429a-93f8-7f0e00e5f6d4 service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Received unexpected event network-vif-plugged-dbcc9cc9-fdcd-497b-ad2d-743248442109 for instance with vm_state building and task_state spawning. 
[ 1209.954031] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Successfully updated port: dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1209.956425] env[61545]: DEBUG nova.scheduler.client.report [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1210.021581] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256757, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.092241] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.092584] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.229655] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 79762f13-2f93-43ba-883b-9437c7732c04] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1210.307063] env[61545]: DEBUG oslo_vmware.api [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256758, 'name': PowerOnVM_Task, 'duration_secs': 0.453152} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.308034] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1210.308158] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc33105-86ea-43da-9068-70fcba6d5640 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance '219981bd-04cd-4253-a15e-eebd4083bfa8' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1210.464042] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.464215] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.464375] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1210.466356] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.490462] env[61545]: INFO nova.scheduler.client.report [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted allocations for instance 2129a1eb-4ad7-42ef-9554-6202f7a44f58 [ 1210.517521] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256757, 'name': CloneVM_Task, 'duration_secs': 1.358593} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.517797] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Created linked-clone VM from snapshot [ 1210.518578] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbacfe0-c537-4738-9465-236957772b2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.526842] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Uploading image 30fdee25-4339-4402-9519-ec7f05638745 {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1210.556740] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1210.556740] env[61545]: value = "vm-838838" [ 1210.556740] env[61545]: _type = "VirtualMachine" [ 1210.556740] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1210.557132] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ab1250ac-2069-4518-92ad-356ac3a7dbdd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.565345] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease: (returnval){ [ 1210.565345] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52675c75-07b6-53f1-d34c-70ad43038350" [ 1210.565345] env[61545]: _type = "HttpNfcLease" [ 1210.565345] env[61545]: } obtained for exporting VM: (result){ [ 1210.565345] env[61545]: value = "vm-838838" [ 1210.565345] env[61545]: _type = "VirtualMachine" [ 1210.565345] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1210.565672] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the lease: (returnval){ [ 1210.565672] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52675c75-07b6-53f1-d34c-70ad43038350" [ 1210.565672] env[61545]: _type = "HttpNfcLease" [ 1210.565672] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1210.572987] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1210.572987] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52675c75-07b6-53f1-d34c-70ad43038350" [ 1210.572987] env[61545]: _type = "HttpNfcLease" [ 1210.572987] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1210.581033] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.581314] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.595745] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1210.733822] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 6b22a526-1cd9-4dbe-ac5c-c1ee4e08fb0a] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1210.998809] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c84b275-58cc-46c9-a86c-31a826acb804 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "2129a1eb-4ad7-42ef-9554-6202f7a44f58" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.291s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.007138] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1211.075524] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.075524] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52675c75-07b6-53f1-d34c-70ad43038350" [ 1211.075524] env[61545]: _type = "HttpNfcLease" [ 1211.075524] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1211.075834] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1211.075834] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52675c75-07b6-53f1-d34c-70ad43038350" [ 1211.075834] env[61545]: _type = "HttpNfcLease" [ 1211.075834] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1211.076566] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb65342-9f5c-4a40-a246-468cdbb9a62d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.084804] env[61545]: INFO nova.compute.manager [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Detaching volume 5e8021d6-c42a-495c-b751-a9d5a4018b58 [ 1211.086584] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1211.086760] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1211.171082] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.171422] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.173233] env[61545]: INFO nova.compute.claims [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1211.181714] env[61545]: INFO nova.virt.block_device [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attempting to driver detach volume 5e8021d6-c42a-495c-b751-a9d5a4018b58 from mountpoint /dev/sdb [ 1211.181943] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1211.182185] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838835', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'name': 'volume-5e8021d6-c42a-495c-b751-a9d5a4018b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'serial': '5e8021d6-c42a-495c-b751-a9d5a4018b58'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1211.183744] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba82bea-63b9-4a8b-b8f1-4ca51151d58c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.213482] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e977eac-6a7a-4336-bcb7-1db7df98e783 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.217155] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fee8de8f-78d8-4039-aaa8-7b49aaf8e6d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.226507] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d196f1c0-f9d3-427c-9d5c-0bbecb705fa8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.252524] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: db2d0e21-f6bb-4f61-8d54-e9191de13a59] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1211.260302] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b51c21-4d98-4ba8-a53b-6b4b268cad62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.278122] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] The volume has not been displaced from its original location: [datastore1] volume-5e8021d6-c42a-495c-b751-a9d5a4018b58/volume-5e8021d6-c42a-495c-b751-a9d5a4018b58.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1211.283838] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfiguring VM instance instance-00000060 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1211.290347] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c93b4bb9-469e-4935-9d96-4b8b746195c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.312501] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1211.312501] env[61545]: value = "task-4256760" [ 1211.312501] env[61545]: _type = "Task" [ 1211.312501] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.330027] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256760, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.337595] env[61545]: DEBUG nova.network.neutron [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Updating instance_info_cache with network_info: [{"id": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "address": "fa:16:3e:41:c4:9a", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbcc9cc9-fd", "ovs_interfaceid": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.577570] env[61545]: DEBUG nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Successfully updated port: 189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1211.764511] 
env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 2f8567b1-7291-4705-8ef3-23547eb4860e] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1211.822876] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256760, 'name': ReconfigVM_Task, 'duration_secs': 0.289679} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.823339] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfigured VM instance instance-00000060 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1211.828762] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b01365-2414-4495-9654-49d794e7f60e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.846807] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.847015] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Instance network_info: |[{"id": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "address": "fa:16:3e:41:c4:9a", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbcc9cc9-fd", "ovs_interfaceid": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1211.849720] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:c4:9a', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbcc9cc9-fdcd-497b-ad2d-743248442109', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1211.857757] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating folder: Project (b790c7b2af394de28f7f42ce0d230346). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1211.860771] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae2e1259-a4f9-4acd-a197-66d8cb774522 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.866196] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1211.866196] env[61545]: value = "task-4256761" [ 1211.866196] env[61545]: _type = "Task" [ 1211.866196] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.877990] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created folder: Project (b790c7b2af394de28f7f42ce0d230346) in parent group-v838542. [ 1211.878684] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating folder: Instances. Parent ref: group-v838839. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1211.878684] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.878849] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43841543-a3f4-40a9-a435-9a733bbb5979 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.894096] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created folder: Instances in parent group-v838839. [ 1211.894569] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.895140] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1211.896050] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1266098b-005f-4aba-a6b5-5e108b4482e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.921263] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1211.921263] env[61545]: value = "task-4256764" [ 1211.921263] env[61545]: _type = "Task" [ 1211.921263] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.932846] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256764, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.021592] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Received event network-changed-dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1212.022064] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Refreshing instance network info cache due to event network-changed-dbcc9cc9-fdcd-497b-ad2d-743248442109. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1212.022337] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Acquiring lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.022515] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Acquired lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.022793] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Refreshing network info cache for port dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1212.080341] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.080730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.080875] env[61545]: DEBUG nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.268104] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 91eeceeb-c11e-414b-8ae6-e68e927f1f1e] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1212.281239] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.282211] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.372365] env[61545]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef6e6ed-539d-4e16-8209-e9f71babeb79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.382222] env[61545]: DEBUG oslo_vmware.api [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256761, 'name': ReconfigVM_Task, 'duration_secs': 0.188221} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.384488] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838835', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'name': 'volume-5e8021d6-c42a-495c-b751-a9d5a4018b58', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': '5e8021d6-c42a-495c-b751-a9d5a4018b58', 'serial': '5e8021d6-c42a-495c-b751-a9d5a4018b58'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1212.387783] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c904cdd1-509e-461c-853d-7bf5d684651b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.420218] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cea5e80-a26a-4cb9-be14-9128b1a1b557 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.435127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79344c5-4d93-4016-a4ef-f446f13f419f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.439153] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256764, 'name': CreateVM_Task, 'duration_secs': 0.356497} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.439449] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1212.440663] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.441157] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.441521] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1212.441906] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4e2c22-c9c3-4bd7-9fba-f3d6b9983e96 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.455264] env[61545]: DEBUG nova.compute.provider_tree [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.460509] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1212.460509] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d88a5f-48bd-2fba-ad32-72b99fb30854" [ 1212.460509] env[61545]: _type = "Task" [ 1212.460509] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.470289] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d88a5f-48bd-2fba-ad32-72b99fb30854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.633994] env[61545]: WARNING nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. 
ignoring it [ 1212.634357] env[61545]: WARNING nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. ignoring it [ 1212.771847] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: b33e29cc-fe26-429a-8799-8d790667cc1d] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1212.785416] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1212.840354] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.840631] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.840928] env[61545]: DEBUG nova.compute.manager [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Going to confirm migration 7 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1212.881187] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Updated VIF entry in instance network info cache for port dbcc9cc9-fdcd-497b-ad2d-743248442109. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.881720] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Updating instance_info_cache with network_info: [{"id": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "address": "fa:16:3e:41:c4:9a", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbcc9cc9-fd", "ovs_interfaceid": "dbcc9cc9-fdcd-497b-ad2d-743248442109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.937375] env[61545]: DEBUG nova.objects.instance [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.960860] env[61545]: DEBUG nova.scheduler.client.report [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1212.978590] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d88a5f-48bd-2fba-ad32-72b99fb30854, 'name': SearchDatastore_Task, 'duration_secs': 0.013047} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.978897] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.979155] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1212.979414] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.979565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.979750] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1212.980325] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc2680d8-1a1f-4a8f-b872-0ef2539830ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.991336] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1212.991556] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1212.992340] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21d4206-2555-44b1-ae34-d579b01e6e35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.998678] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1212.998678] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529342bd-2101-025c-f146-919f974fc883" [ 1212.998678] env[61545]: _type = "Task" [ 1212.998678] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.010340] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529342bd-2101-025c-f146-919f974fc883, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.182958] env[61545]: DEBUG nova.network.neutron [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d2fa1081-8512-43d3-ba49-d739e11278fe", "address": "fa:16:3e:a4:7e:3e", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2fa1081-85", "ovs_interfaceid": "d2fa1081-8512-43d3-ba49-d739e11278fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "189f9616-2a3c-4fd7-964c-63b93549ef08", "address": "fa:16:3e:b4:2b:14", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189f9616-2a", "ovs_interfaceid": "189f9616-2a3c-4fd7-964c-63b93549ef08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.275304] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 97b72809-2a1e-4eda-af82-71cac2d79a64] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1213.309532] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.383514] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.383794] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquired lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.383979] env[61545]: DEBUG nova.network.neutron [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Building network info cache for instance {{(pid=61545) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1213.384186] env[61545]: DEBUG nova.objects.instance [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'info_cache' on Instance uuid 219981bd-04cd-4253-a15e-eebd4083bfa8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.386418] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Releasing lock "refresh_cache-9a59f45b-727f-45ea-ad33-64fa23aaffe7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.386891] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-plugged-189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1213.386891] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.386891] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.387801] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.389019] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] No waiting events found dispatching network-vif-plugged-189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1213.389019] env[61545]: WARNING nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received unexpected event network-vif-plugged-189f9616-2a3c-4fd7-964c-63b93549ef08 for instance with vm_state active and task_state None. 
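(Editorial aside: the paired "Acquiring lock … / Lock … acquired / Lock … released" lines around pop_instance_event above come from oslo.concurrency's named-lock helpers. The following is a minimal, illustrative sketch only — the lock name pattern, the _pending_events map, and the handler are hypothetical stand-ins, not Nova's actual implementation — showing the shape of that acquire/release pattern:

from oslo_concurrency import lockutils

# Hypothetical per-instance "events" map guarded by a named lock, mirroring the
# "<instance-uuid>-events" acquire/release lines in the log above.
_pending_events = {}

def pop_instance_event(instance_uuid, event_name):
    # lockutils.lock() is a context manager around a named in-process lock;
    # the log's "acquired by" / "released by" lines correspond to entering
    # and leaving this block.
    with lockutils.lock('%s-events' % instance_uuid):
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)

When no waiter is registered for an event — as with the network-vif-plugged event dispatched above — the compute manager simply logs the "Received unexpected event" warning rather than delivering it to anyone.)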
[ 1213.389019] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-changed-189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1213.389019] env[61545]: DEBUG nova.compute.manager [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing instance network info cache due to event network-changed-189f9616-2a3c-4fd7-964c-63b93549ef08. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1213.389019] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.470657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.471089] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1213.474126] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.165s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.475631] env[61545]: INFO nova.compute.claims [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1213.511571] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529342bd-2101-025c-f146-919f974fc883, 'name': SearchDatastore_Task, 'duration_secs': 0.011715} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.512773] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f53c710-b854-4971-b888-e9864e238abb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.519591] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1213.519591] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521213bf-8991-72f0-de70-54acea97a68c" [ 1213.519591] env[61545]: _type = "Task" [ 1213.519591] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.529686] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521213bf-8991-72f0-de70-54acea97a68c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.686438] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.687276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.687501] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.687810] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.688047] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Refreshing network info cache for port 189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.689905] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee6352d-7ecd-4690-9bd7-0fc5861be475 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.710017] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1213.710269] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.710471] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1213.710676] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.710821] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1213.710970] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1213.711195] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1213.711384] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1213.711581] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1213.711744] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1213.711913] env[61545]: DEBUG nova.virt.hardware [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1213.718441] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfiguring VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1213.719595] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa5f640f-f2ba-4730-b2de-8b0a0ba606f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.737787] env[61545]: DEBUG oslo_vmware.api [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1213.737787] env[61545]: value = "task-4256765" [ 1213.737787] env[61545]: _type = "Task" [ 1213.737787] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.746504] env[61545]: DEBUG oslo_vmware.api [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256765, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.779204] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 7301c541-664f-43ec-8a34-86f38cac22ab] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1213.945577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f4ba1a92-5ff4-4a6e-8b16-4d26bc1b1748 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.364s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.980529] env[61545]: DEBUG nova.compute.utils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1213.985231] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1213.985231] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1214.032125] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521213bf-8991-72f0-de70-54acea97a68c, 'name': SearchDatastore_Task, 'duration_secs': 0.011645} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.032499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.032793] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9a59f45b-727f-45ea-ad33-64fa23aaffe7/9a59f45b-727f-45ea-ad33-64fa23aaffe7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1214.033126] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39f46d69-414d-4332-a11f-3b62f0d628d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.036829] env[61545]: DEBUG nova.policy [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb00c18cd27541359ae0adf45f5c4171', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa2360863a5f4eff8a88eca0c88fa76d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1214.043658] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1214.043658] env[61545]: value = "task-4256766" [ 1214.043658] env[61545]: _type = "Task" [ 1214.043658] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.052345] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.251419] env[61545]: DEBUG oslo_vmware.api [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.282850] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 855904d8-7eb3-405d-9236-ab4ba9b33940] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1214.486469] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1214.566616] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256766, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.635709] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Successfully created port: 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.690190] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d33b1c-50d0-4969-ad73-44af6feadf56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.701240] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9401a752-c01c-4ce1-b0fe-c5b56a159d5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.735249] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updated VIF entry in instance network info cache for port 189f9616-2a3c-4fd7-964c-63b93549ef08. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1214.735729] env[61545]: DEBUG nova.network.neutron [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d2fa1081-8512-43d3-ba49-d739e11278fe", "address": "fa:16:3e:a4:7e:3e", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2fa1081-85", "ovs_interfaceid": "d2fa1081-8512-43d3-ba49-d739e11278fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "189f9616-2a3c-4fd7-964c-63b93549ef08", "address": "fa:16:3e:b4:2b:14", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189f9616-2a", "ovs_interfaceid": "189f9616-2a3c-4fd7-964c-63b93549ef08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.741023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7d568a-e447-48ff-b326-5a0ddb02b304 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.753686] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d4d08-b766-4b7d-a238-3ad86d221ab3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.758362] env[61545]: DEBUG oslo_vmware.api [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256765, 'name': ReconfigVM_Task, 'duration_secs': 0.641165} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.761715] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.761971] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfigured VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1214.777226] env[61545]: DEBUG nova.compute.provider_tree [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.786305] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5a610b1c-df03-4ca6-83ff-ba651edcc8d0] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1214.961171] env[61545]: DEBUG nova.network.neutron [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [{"id": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "address": "fa:16:3e:c6:7d:49", "network": {"id": "9b47b98c-2943-46e7-9f3d-9c58f043b319", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2131420986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68a860104885480d9da472bc969ba6d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db1f7867-8524-469c-ab47-d2c9e2751d98", "external-id": "nsx-vlan-transportzone-130", "segmentation_id": 130, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdfe-4d", "ovs_interfaceid": "f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.057472] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661252} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.057785] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 9a59f45b-727f-45ea-ad33-64fa23aaffe7/9a59f45b-727f-45ea-ad33-64fa23aaffe7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1215.058053] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1215.058695] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cf0ecd1-ed96-448a-9b0b-820f87bbddc4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.066382] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1215.066382] env[61545]: value = "task-4256767" [ 1215.066382] env[61545]: _type = "Task" [ 1215.066382] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.078551] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256767, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.241557] env[61545]: DEBUG oslo_concurrency.lockutils [req-8521d630-b680-4bd2-85d2-3397a16b5ab6 req-cc0c8b34-612a-4d3e-bd7e-174eb55801eb service nova] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.267336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-569b3954-9b9c-4e52-8091-163936a69e03 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-189f9616-2a3c-4fd7-964c-63b93549ef08" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.135s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.281123] env[61545]: DEBUG nova.scheduler.client.report [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.289832] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 04cdc45a-a01a-4c9e-816e-c7b6b5f6b81b] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1215.464247] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Releasing lock "refresh_cache-219981bd-04cd-4253-a15e-eebd4083bfa8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.464563] env[61545]: DEBUG nova.objects.instance [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lazy-loading 'migration_context' on Instance uuid 219981bd-04cd-4253-a15e-eebd4083bfa8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.501153] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1215.530154] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.530430] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.530600] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.530789] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.530936] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.531106] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.531344] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.531585] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.531777] env[61545]: DEBUG nova.virt.hardware [None 
req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.531946] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.532131] env[61545]: DEBUG nova.virt.hardware [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.533048] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d70e9c2-5c83-496f-a152-370195f9c097 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.543381] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d306a6-1bb1-4486-8db9-495441b17943 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.575867] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.212959} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.576166] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1215.576958] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da7d2ac-99a7-4129-8044-fbc121802935 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.599985] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 9a59f45b-727f-45ea-ad33-64fa23aaffe7/9a59f45b-727f-45ea-ad33-64fa23aaffe7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1215.600375] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-930d1e83-f761-4c9f-91a5-79ad230dd72f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.623582] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1215.623582] env[61545]: value = "task-4256768" [ 1215.623582] env[61545]: _type = "Task" [ 1215.623582] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.633423] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.786457] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.787025] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1215.794224] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: fed2c050-74e7-48f1-8a19-7c58e26d2159] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1215.967580] env[61545]: DEBUG nova.objects.base [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Object Instance<219981bd-04cd-4253-a15e-eebd4083bfa8> lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1215.968602] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f865d8c6-8121-438f-9ed6-64998df44661 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.988282] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37538c5d-062f-4417-9b19-de8f6c36bf35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.994407] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1215.994407] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c93555-6d9a-4559-cc00-f49e78d55efd" [ 1215.994407] env[61545]: _type = "Task" [ 1215.994407] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.003303] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c93555-6d9a-4559-cc00-f49e78d55efd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.133877] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256768, 'name': ReconfigVM_Task, 'duration_secs': 0.344989} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.134187] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 9a59f45b-727f-45ea-ad33-64fa23aaffe7/9a59f45b-727f-45ea-ad33-64fa23aaffe7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1216.134867] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b558110-4380-453f-ace0-cf7b2177ad05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.142895] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1216.142895] env[61545]: value = "task-4256769" [ 1216.142895] env[61545]: _type = "Task" [ 1216.142895] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.152393] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256769, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.292479] env[61545]: DEBUG nova.compute.utils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1216.294029] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1216.294215] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1216.297624] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: fff833ad-55af-4702-859b-05f94cac18c8] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1216.373021] env[61545]: DEBUG nova.policy [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a70fb8ea2d498688688f7e51cf4bac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45efa52890714522b3058b7144b42a89', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1216.506698] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c93555-6d9a-4559-cc00-f49e78d55efd, 'name': SearchDatastore_Task, 'duration_secs': 0.007607} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.507219] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.507314] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.653806] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256769, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.798284] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1216.801765] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 665db895-52ce-4e7c-9a78-86db5b695534] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1216.812809] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Successfully created port: ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1216.924334] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Successfully updated port: 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1216.987858] env[61545]: DEBUG nova.compute.manager [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Received event network-vif-plugged-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1216.988502] env[61545]: DEBUG oslo_concurrency.lockutils [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.988725] env[61545]: DEBUG oslo_concurrency.lockutils [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.988897] env[61545]: DEBUG oslo_concurrency.lockutils [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.991138] env[61545]: DEBUG nova.compute.manager [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] No waiting events found dispatching network-vif-plugged-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1216.991382] env[61545]: WARNING nova.compute.manager [req-4b789430-70c2-40e9-b60f-be555595e0d3 req-a7ec3db8-3146-4640-8ed6-a0e50a4bc199 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Received unexpected event network-vif-plugged-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d for instance with vm_state building and task_state spawning. 
[ 1217.139016] env[61545]: DEBUG nova.compute.manager [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Received event network-changed-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1217.139249] env[61545]: DEBUG nova.compute.manager [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Refreshing instance network info cache due to event network-changed-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1217.139506] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] Acquiring lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.139607] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] Acquired lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.139770] env[61545]: DEBUG nova.network.neutron [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Refreshing network info cache for port 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.154858] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256769, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.185299] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b7573e-418a-4e23-93d1-887a8c315a7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.193360] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3dc125-1b1d-4bfe-b9d7-6b673f94e890 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.226094] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37376c96-2472-419e-bb23-8bb0b2f5766a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.234715] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766b71dd-84ed-45d2-be25-7ba12aca4dac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.251948] env[61545]: DEBUG nova.compute.provider_tree [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.305225] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: d980f421-03b5-4b0e-b547-a33031356d55] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1217.427269] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.609742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.610075] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.656880] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256769, 'name': Rename_Task, 'duration_secs': 1.169692} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.657191] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1217.657450] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec6cc4a4-7026-44b7-9ff5-f4c3edac95df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.666639] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1217.666639] env[61545]: value = "task-4256770" [ 1217.666639] env[61545]: _type = "Task" [ 1217.666639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.679010] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.706761] env[61545]: DEBUG nova.network.neutron [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1217.755969] env[61545]: DEBUG nova.scheduler.client.report [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.812530] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5ba53915-ab57-493e-b2e1-7f3d1b3845ee] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1217.816430] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1217.820277] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-d2fa1081-8512-43d3-ba49-d739e11278fe" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.820831] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-d2fa1081-8512-43d3-ba49-d739e11278fe" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.864357] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1217.864892] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1217.865125] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1217.865276] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1217.865426] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1217.865571] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1217.865893] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1217.866040] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1217.866217] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1217.866384] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1217.866560] env[61545]: DEBUG nova.virt.hardware [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1217.867481] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258938ad-744a-4a45-88ee-d76c9c8403e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.877610] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fe8e36-54f1-4b8f-ad59-467e8bd250dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.897408] env[61545]: DEBUG nova.network.neutron [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.113493] env[61545]: INFO nova.compute.manager [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Detaching volume c823d33f-da1f-4f2b-bda1-6e5c472699ea [ 1218.152517] env[61545]: INFO nova.virt.block_device [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Attempting to driver detach volume c823d33f-da1f-4f2b-bda1-6e5c472699ea from mountpoint /dev/sdc [ 
1218.152760] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Volume detach. Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.152963] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838836', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'name': 'volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'serial': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.153916] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a6583d-109e-4290-a051-29089da181f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.181968] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4985ce1c-e0c8-457b-ad93-176ab16b491f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.190776] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256770, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.193273] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae22f24d-f623-437d-8007-e8d01f523673 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.215810] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7d6fe5-cc56-4466-b683-f33f31757e26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.234870] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] The volume has not been displaced from its original location: [datastore1] volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea/volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1218.240253] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfiguring VM instance instance-00000060 to detach disk 2002 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1218.240689] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71f5f0b0-fbf2-4ade-87e7-af72503d3416 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.259728] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1218.259728] env[61545]: value = "task-4256771" [ 1218.259728] env[61545]: _type = "Task" [ 1218.259728] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.272794] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256771, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.326235] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 578ce929-99fd-47ae-8275-e4ac9abe8d49] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1218.329720] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.329912] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.330925] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ab7e0a-64e5-4658-a44b-3e3929222c14 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.358215] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfa5b46-98b6-4b5e-894a-20a4b6626f95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.391265] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfiguring VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1218.392165] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cb8c0b6-762c-4e92-b793-2b65d43be697 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.412950] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Successfully updated port: ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1218.414763] env[61545]: DEBUG oslo_concurrency.lockutils [req-9ed9e0e2-083b-4019-962b-ad00c4a830b7 req-60c10810-1fc7-4671-b453-82de505ba3e6 service nova] Releasing lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.415882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.416128] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1218.425552] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1218.425552] env[61545]: value = "task-4256772" [ 1218.425552] env[61545]: _type = "Task" [ 1218.425552] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.437227] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.688305] env[61545]: DEBUG oslo_vmware.api [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256770, 'name': PowerOnVM_Task, 'duration_secs': 0.557675} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.688476] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1218.688573] env[61545]: INFO nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Took 9.73 seconds to spawn the instance on the hypervisor. [ 1218.688755] env[61545]: DEBUG nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1218.689599] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe896d4f-3946-4d0f-b4ff-3cf0947b2e41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.772281] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.265s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.775330] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256771, 'name': ReconfigVM_Task, 'duration_secs': 0.339678} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.775653] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Reconfigured VM instance instance-00000060 to detach disk 2002 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1218.780973] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71c51ab2-0ffa-43ca-bdac-5d4783921072 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.798056] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1218.798056] env[61545]: value = "task-4256773" [ 1218.798056] env[61545]: _type = "Task" [ 1218.798056] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.808054] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256773, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.830474] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 73df6df0-ead6-49cd-8b0a-5e95acfc7e15] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1218.917511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.917511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.917511] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1218.937655] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.972439] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1219.167073] env[61545]: DEBUG nova.compute.manager [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1219.167129] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.167342] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.167560] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.167761] env[61545]: DEBUG nova.compute.manager [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] No waiting events found dispatching network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1219.167962] env[61545]: WARNING nova.compute.manager [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received unexpected event network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b for instance with vm_state building and task_state spawning. [ 1219.168199] env[61545]: DEBUG nova.compute.manager [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1219.168474] env[61545]: DEBUG nova.compute.manager [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing instance network info cache due to event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1219.168693] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.171385] env[61545]: DEBUG nova.network.neutron [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [{"id": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "address": "fa:16:3e:b0:54:e7", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd4ffa1-26", "ovs_interfaceid": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.209246] env[61545]: INFO nova.compute.manager [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Took 14.90 seconds to build instance. [ 1219.310733] env[61545]: DEBUG oslo_vmware.api [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256773, 'name': ReconfigVM_Task, 'duration_secs': 0.163731} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.311027] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838836', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'name': 'volume-c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0ae4965-42eb-4286-8cd9-a5c82426f1bf', 'attached_at': '', 'detached_at': '', 'volume_id': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea', 'serial': 'c823d33f-da1f-4f2b-bda1-6e5c472699ea'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1219.334706] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 4cdcf2c4-c87f-4bce-8f77-9da42a1b4a42] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1219.340283] env[61545]: INFO nova.scheduler.client.report [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocation for migration b21cc98f-d8c9-498e-8fbd-31e7f1478c55 [ 1219.437926] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.461080] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1219.636377] env[61545]: DEBUG nova.network.neutron [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.674645] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.675027] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Instance network_info: |[{"id": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "address": "fa:16:3e:b0:54:e7", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd4ffa1-26", "ovs_interfaceid": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1219.675448] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:54:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bd4ffa1-26c1-49ba-b154-0c04a3294c9d', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1219.683334] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1219.683575] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1219.683799] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eeacd22e-ee03-460b-b600-4371e01cae10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.705304] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1219.705304] env[61545]: value = "task-4256774" [ 1219.705304] env[61545]: _type = "Task" [ 1219.705304] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.713916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0d2f308a-bd4e-4809-ab10-13ea2c51ef20 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.411s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.714156] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256774, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.838440] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 8ab168cb-b0a9-403c-bdb5-b96c6d319baf] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1219.846522] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.006s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.866101] env[61545]: DEBUG nova.objects.instance [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'flavor' on Instance uuid e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.938747] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.139699] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.140070] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance network_info: |[{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1220.140445] env[61545]: DEBUG oslo_concurrency.lockutils 
[req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.140660] env[61545]: DEBUG nova.network.neutron [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1220.141904] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:25:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff62e994-2e58-433b-884f-5b4fa7639d6b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1220.149585] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1220.150416] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1220.150698] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5b190ea-691e-45e4-91b3-f895a6f75f58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.172036] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1220.172036] env[61545]: value = "task-4256775" [ 1220.172036] env[61545]: _type = "Task" [ 1220.172036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.180470] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256775, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.216018] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256774, 'name': CreateVM_Task, 'duration_secs': 0.434801} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.216216] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1220.216951] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.217148] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.217552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1220.217870] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6089d7ea-e5eb-4e31-8cb8-29593f67a593 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.223377] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1220.223377] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e9ffbf-5258-d431-4275-f4805fb3e31c" [ 1220.223377] env[61545]: _type = "Task" [ 1220.223377] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.232139] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e9ffbf-5258-d431-4275-f4805fb3e31c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.344605] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1e5be92c-d727-4515-9e16-85ade2719455] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1220.441468] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.685599] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256775, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.734451] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e9ffbf-5258-d431-4275-f4805fb3e31c, 'name': SearchDatastore_Task, 'duration_secs': 0.012248} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.734811] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.735074] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1220.735299] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.735463] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.735742] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1220.737938] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-495ee711-7ecd-4bce-9d79-55d509bc418a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.746835] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1220.747805] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c8da6d-0dda-4506-8484-3ba4f84d77f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.751975] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.752191] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1220.753351] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e691b5d-d490-4423-9c8e-512a27ac8b35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.757699] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1220.757914] env[61545]: ERROR oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk due to incomplete transfer. [ 1220.758588] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0b2cc9b9-8d05-4089-97f2-f897b25ad2fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.761377] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1220.761377] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526ae2fb-353b-3d7c-54f6-6f9bf714ada3" [ 1220.761377] env[61545]: _type = "Task" [ 1220.761377] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.768370] env[61545]: DEBUG oslo_vmware.rw_handles [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ff10b4-6f61-9655-1c4b-e373ad42cd99/disk-0.vmdk. 
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1220.768573] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Uploaded image 30fdee25-4339-4402-9519-ec7f05638745 to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1220.771143] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1220.771758] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cf83e06a-38a9-4de6-af10-5c09eda0f6ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.777849] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]526ae2fb-353b-3d7c-54f6-6f9bf714ada3, 'name': SearchDatastore_Task, 'duration_secs': 0.010267} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.779705] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1220.779705] env[61545]: value = "task-4256776" [ 1220.779705] env[61545]: _type = "Task" [ 1220.779705] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.779918] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0275be7d-02bd-4cd8-8b84-16b62ea344fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.788815] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1220.788815] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c88056-3d6b-78b8-4222-9f616322f7e9" [ 1220.788815] env[61545]: _type = "Task" [ 1220.788815] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.794500] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256776, 'name': Destroy_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.803310] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c88056-3d6b-78b8-4222-9f616322f7e9, 'name': SearchDatastore_Task, 'duration_secs': 0.0097} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.803574] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.804153] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fde00c6e-29b5-4b99-944a-c0404e4f2fae/fde00c6e-29b5-4b99-944a-c0404e4f2fae.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1220.804153] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d7e8fe4-c8bf-4ccf-bc91-4ee2742a9560 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.812114] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1220.812114] env[61545]: value = "task-4256777" [ 1220.812114] env[61545]: _type = "Task" [ 1220.812114] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.821510] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256777, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.848014] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: f9c9c447-e676-4143-b329-fb6d71bcd553] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1220.878295] env[61545]: DEBUG oslo_concurrency.lockutils [None req-56bc2e63-db5a-4b47-af18-f8f689d27543 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.939584] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.033932] env[61545]: DEBUG nova.network.neutron [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updated VIF entry in instance network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1221.034428] env[61545]: DEBUG nova.network.neutron [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.186114] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256775, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.295064] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256776, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.323077] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486313} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.323375] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fde00c6e-29b5-4b99-944a-c0404e4f2fae/fde00c6e-29b5-4b99-944a-c0404e4f2fae.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.323596] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.323860] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60415137-f0e5-4c83-94d8-3bf62bb90cc8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.331592] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1221.331592] env[61545]: value = "task-4256778" [ 1221.331592] env[61545]: _type = "Task" [ 1221.331592] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.339613] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256778, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.351436] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: c2bb4ea0-e9fb-4198-80fa-acfd25fb226d] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1221.373653] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.373775] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.373988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.374185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.374356] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.376854] env[61545]: INFO nova.compute.manager [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Terminating instance [ 1221.439482] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.537692] env[61545]: DEBUG oslo_concurrency.lockutils [req-ea3eea13-1495-412c-b27c-a864c53d757c req-b2f8c2ce-59ed-43ee-bb5d-055cde9f920e service nova] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.647619] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.647916] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.648170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.648417] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.648583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.650809] env[61545]: INFO nova.compute.manager [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Terminating instance [ 1221.670278] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.670565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba 
tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.684270] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256775, 'name': CreateVM_Task, 'duration_secs': 1.35252} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.684431] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1221.685120] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.685285] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.685604] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1221.686284] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59fb47fb-0822-400e-82cf-c33ac2cd6a7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.691552] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1221.691552] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d280b1-023a-5fcc-09ff-3a2879b52a1c" [ 1221.691552] env[61545]: _type = "Task" [ 1221.691552] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.700211] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d280b1-023a-5fcc-09ff-3a2879b52a1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.794642] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256776, 'name': Destroy_Task, 'duration_secs': 0.82436} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.794953] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Destroyed the VM [ 1221.795241] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1221.795544] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5833ca88-ed0c-4450-9385-b1427a85ee30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.803994] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1221.803994] env[61545]: value = "task-4256779" [ 1221.803994] env[61545]: _type = "Task" [ 1221.803994] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.813871] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256779, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.842314] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075756} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.842602] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1221.843487] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002d4b1d-fe10-4330-a6bc-9b6b3407ca51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.857991] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 19aabdc5-8d2f-4adb-aea0-34ce4482677a] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1221.870515] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] fde00c6e-29b5-4b99-944a-c0404e4f2fae/fde00c6e-29b5-4b99-944a-c0404e4f2fae.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1221.871240] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c8c3ae3-240d-4ef8-993b-5f0ba6ae9a54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.891131] env[61545]: DEBUG nova.compute.manager [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.891377] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.892875] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0542be24-afc9-48b3-b7d5-1bdfe0c3fd02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.901709] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.903222] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58ba604b-65d3-4d0e-9c8c-6007734428aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.905314] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1221.905314] env[61545]: value = "task-4256780" [ 1221.905314] env[61545]: _type = "Task" [ 1221.905314] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.911361] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1221.911361] env[61545]: value = "task-4256781" [ 1221.911361] env[61545]: _type = "Task" [ 1221.911361] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.919523] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256780, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.925502] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.941341] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.155911] env[61545]: DEBUG nova.compute.manager [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1222.156215] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1222.157205] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6eb234-a617-4a52-9f0d-caa28d8ca917 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.165789] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.166052] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c9e3489-a1a8-46fa-9d45-8192cc01dc6e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.173318] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1222.176246] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1222.176246] env[61545]: value = "task-4256782" [ 1222.176246] env[61545]: _type = "Task" [ 1222.176246] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.187107] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.203697] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d280b1-023a-5fcc-09ff-3a2879b52a1c, 'name': SearchDatastore_Task, 'duration_secs': 0.010518} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.203982] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.204230] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1222.204627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.204627] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.204811] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.205125] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2a9c5eb-5dd8-442d-a545-94e5e0727fd4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.216476] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.216613] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1222.217499] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d25350af-060f-40b2-b970-9bd8853d267d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.225583] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1222.225583] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbf502-5418-3931-1ad8-2b766db797ee" [ 1222.225583] env[61545]: _type = "Task" [ 1222.225583] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.236345] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbf502-5418-3931-1ad8-2b766db797ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.314163] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256779, 'name': RemoveSnapshot_Task, 'duration_secs': 0.50285} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.314472] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1222.314703] env[61545]: DEBUG nova.compute.manager [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1222.315585] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd696791-3c38-4dd3-9b03-2e939150f2fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.373052] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: b1277c3b-cd7b-43be-9eff-640145dde5e5] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1222.418134] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256780, 'name': ReconfigVM_Task, 'duration_secs': 0.400585} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.418864] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Reconfigured VM instance instance-0000006b to attach disk [datastore2] fde00c6e-29b5-4b99-944a-c0404e4f2fae/fde00c6e-29b5-4b99-944a-c0404e4f2fae.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.419480] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dfb6a47-8aa7-4903-85aa-51ed37c3d2df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.424105] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256781, 'name': PowerOffVM_Task, 'duration_secs': 0.21914} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.424668] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.424852] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.426033] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9df68067-4fa2-41ef-adaf-14527f10a97d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.429141] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1222.429141] env[61545]: value = "task-4256783" [ 1222.429141] env[61545]: _type = "Task" [ 1222.429141] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.443409] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256783, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.446935] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.497035] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.497287] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.497548] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleting the datastore file [datastore2] 219981bd-04cd-4253-a15e-eebd4083bfa8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.497864] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1940010-da06-4156-8269-f0c59ca4a96d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.505684] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for the task: (returnval){ [ 1222.505684] env[61545]: value = "task-4256785" [ 1222.505684] env[61545]: _type = "Task" [ 1222.505684] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.514635] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.692029] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256782, 'name': PowerOffVM_Task, 'duration_secs': 0.316612} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.692159] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.692262] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.692847] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cac631e9-0012-4aba-8171-398a5aa7754b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.701351] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.701653] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.703241] env[61545]: INFO nova.compute.claims [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1222.737478] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52cbf502-5418-3931-1ad8-2b766db797ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.738366] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc8d9155-087c-445e-b8b6-666c3b3f0a56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.744647] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1222.744647] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a49142-17ed-1083-891f-749145d3de59" [ 1222.744647] env[61545]: _type = "Task" [ 1222.744647] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.753374] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a49142-17ed-1083-891f-749145d3de59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.828957] env[61545]: INFO nova.compute.manager [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Shelve offloading [ 1222.876065] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1537dbf0-d1b6-410f-8333-788761dd24d7] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1222.943881] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256783, 'name': Rename_Task, 'duration_secs': 0.163959} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.948944] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.949305] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.949555] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.949739] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.949909] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleting the datastore file [datastore2] e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.950153] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-668a07f3-1a22-4fc1-9e10-96cd0d71f641 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.952065] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd2fa849-e683-475a-a471-583f7a700921 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.958466] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1222.958466] env[61545]: value = "task-4256787" [ 1222.958466] env[61545]: _type = "Task" [ 1222.958466] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.959815] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for the task: (returnval){ [ 1222.959815] env[61545]: value = "task-4256788" [ 1222.959815] env[61545]: _type = "Task" [ 1222.959815] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.972296] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.975022] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.015734] env[61545]: DEBUG oslo_vmware.api [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Task: {'id': task-4256785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231992} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.016058] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.016255] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.016456] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.016620] env[61545]: INFO nova.compute.manager [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1223.016867] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.017072] env[61545]: DEBUG nova.compute.manager [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.017173] env[61545]: DEBUG nova.network.neutron [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.255713] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a49142-17ed-1083-891f-749145d3de59, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.255978] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.256271] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1223.256538] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33156ce5-e5b2-49dc-a20a-809deb88ae0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.264595] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1223.264595] env[61545]: value = "task-4256789" [ 1223.264595] env[61545]: _type = "Task" [ 1223.264595] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.273751] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.333147] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1223.333465] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-383a14e9-1e09-4486-bdb4-9d6dd5185560 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.341918] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1223.341918] env[61545]: value = "task-4256790" [ 1223.341918] env[61545]: _type = "Task" [ 1223.341918] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.355874] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1223.356135] env[61545]: DEBUG nova.compute.manager [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1223.357450] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313bf81e-383a-4f11-bade-a5dcfb283901 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.369257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.369257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.369257] env[61545]: DEBUG nova.network.neutron [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1223.379237] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 56680678-c844-4dd2-8541-d50de83b22d7] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1223.432656] env[61545]: DEBUG nova.compute.manager [req-1b4c5a06-2ea0-47ad-ac9a-c1b27477271b req-92bc8bd7-0c16-4942-b5c0-2572be3683af service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Received event network-vif-deleted-f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1223.433254] env[61545]: INFO nova.compute.manager [req-1b4c5a06-2ea0-47ad-ac9a-c1b27477271b req-92bc8bd7-0c16-4942-b5c0-2572be3683af service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Neutron deleted interface f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99; detaching it from the instance and deleting it from the info cache [ 1223.433254] env[61545]: DEBUG nova.network.neutron [req-1b4c5a06-2ea0-47ad-ac9a-c1b27477271b req-92bc8bd7-0c16-4942-b5c0-2572be3683af service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1223.444865] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.475422] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256787, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.478913] env[61545]: DEBUG oslo_vmware.api [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Task: {'id': task-4256788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165114} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.479396] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.479684] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.479778] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.479996] env[61545]: INFO nova.compute.manager [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1223.480338] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.480640] env[61545]: DEBUG nova.compute.manager [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.480723] env[61545]: DEBUG nova.network.neutron [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.785889] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256789, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.883607] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: bea2e59c-02fd-4d6d-8f10-b0e265fa87a2] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1223.906411] env[61545]: DEBUG nova.network.neutron [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.919146] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495aaef9-1d7c-4653-ad60-d891d6a07cb7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.933283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2255b2e9-382d-43da-a994-4344a44a46c1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.937174] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-520ba846-2e49-4669-bc60-c764038809c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.949191] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.980251] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbecb89e-e033-441c-b8d7-a1961262d3e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.995739] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafbb56f-4a33-4370-8346-4010ed95fb2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.009110] env[61545]: DEBUG oslo_vmware.api [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256787, 'name': PowerOnVM_Task, 'duration_secs': 0.699991} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.009110] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.009279] env[61545]: INFO nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Took 8.51 seconds to spawn the instance on the hypervisor. [ 1224.009456] env[61545]: DEBUG nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1224.010910] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e217ce8e-23d2-4428-ae26-c4291a9352a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.027632] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239cf62e-a275-4df7-9317-fd9c3eca8540 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.030618] env[61545]: DEBUG nova.compute.manager [req-1b4c5a06-2ea0-47ad-ac9a-c1b27477271b req-92bc8bd7-0c16-4942-b5c0-2572be3683af service nova] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Detach interface failed, port_id=f28dcdfe-4dc7-43c7-bcf1-59fb2da6cf99, reason: Instance 219981bd-04cd-4253-a15e-eebd4083bfa8 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1224.046766] env[61545]: DEBUG nova.compute.provider_tree [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.282536] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556974} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.282672] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1224.282875] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1224.283203] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d92c1563-22a4-4bf0-9011-60ab943c3001 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.293443] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1224.293443] env[61545]: value = "task-4256791" [ 1224.293443] env[61545]: _type = "Task" [ 1224.293443] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.312189] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256791, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.333245] env[61545]: DEBUG nova.network.neutron [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.390266] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: a84d7a3d-2f7e-459d-94ca-7caa32b7a472] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1224.414605] env[61545]: INFO nova.compute.manager [-] [instance: 219981bd-04cd-4253-a15e-eebd4083bfa8] Took 1.40 seconds to deallocate network for instance. [ 1224.454603] env[61545]: DEBUG oslo_vmware.api [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256772, 'name': ReconfigVM_Task, 'duration_secs': 5.97304} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.455037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.455355] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfigured VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1224.554669] env[61545]: DEBUG nova.scheduler.client.report [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1224.563349] env[61545]: INFO nova.compute.manager [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Took 13.42 seconds to build instance. [ 1224.807139] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.290584} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.807399] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1224.808257] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dd0598-e7a6-4b0c-87fd-4c9555805467 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.830387] env[61545]: DEBUG nova.network.neutron [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.843637] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1224.847639] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.850109] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef9d8516-0fc3-4568-8ec6-cc091f2f000f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.875219] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1224.875219] env[61545]: value = "task-4256792" [ 1224.875219] env[61545]: _type = "Task" [ 1224.875219] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.887864] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256792, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.894322] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: f7a16153-2ef7-4be4-90a2-5ad6616203f8] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1224.923811] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.061589] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.062109] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1225.065303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.142s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.065583] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.067714] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e85fc20d-a89b-4da9-ab89-22b46d2e5516 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.975s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.103571] env[61545]: INFO nova.scheduler.client.report [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Deleted allocations for instance 219981bd-04cd-4253-a15e-eebd4083bfa8 [ 1225.346462] env[61545]: INFO nova.compute.manager [-] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Took 1.87 seconds to deallocate network for instance. 
[ 1225.368920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.369112] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.369398] env[61545]: DEBUG nova.network.neutron [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1225.398164] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256792, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.402018] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 63b3a0ac-6077-4b07-bff0-81e5faa6a2ae] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1225.465248] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1225.466477] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b33f302-f469-41d5-8c4d-3eb8a2c65384 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.475227] env[61545]: DEBUG nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: e0ae4965-42eb-4286-8cd9-a5c82426f1bf] Received event network-vif-deleted-39d2066d-48ee-40bd-bb98-733c92c48910 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1225.475435] env[61545]: DEBUG nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-deleted-d2fa1081-8512-43d3-ba49-d739e11278fe {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1225.475602] env[61545]: INFO nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Neutron deleted interface d2fa1081-8512-43d3-ba49-d739e11278fe; detaching it from the instance and deleting it from the info cache [ 1225.475881] env[61545]: DEBUG nova.network.neutron 
[req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "189f9616-2a3c-4fd7-964c-63b93549ef08", "address": "fa:16:3e:b4:2b:14", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189f9616-2a", "ovs_interfaceid": "189f9616-2a3c-4fd7-964c-63b93549ef08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.484086] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1225.484225] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffde584e-ec1c-4859-be73-012692573566 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.556214] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 
tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1225.556448] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1225.557135] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.557483] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be23afe8-7be5-4eae-a451-72b76db1e13f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.565343] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1225.565343] env[61545]: value = "task-4256794" [ 1225.565343] env[61545]: _type = "Task" [ 1225.565343] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.572866] env[61545]: DEBUG nova.compute.utils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1225.578378] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1225.578648] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1225.581305] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256794, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.619857] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6c2fd8e8-fe90-4750-80f6-ec35f7a21e65 tempest-DeleteServersTestJSON-890366011 tempest-DeleteServersTestJSON-890366011-project-member] Lock "219981bd-04cd-4253-a15e-eebd4083bfa8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.246s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.623333] env[61545]: DEBUG nova.policy [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1225.856986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.857618] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.857754] env[61545]: DEBUG nova.objects.instance [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lazy-loading 'resources' on Instance uuid e0ae4965-42eb-4286-8cd9-a5c82426f1bf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.876554] env[61545]: DEBUG nova.compute.manager [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-deleted-189f9616-2a3c-4fd7-964c-63b93549ef08 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1225.876859] env[61545]: INFO nova.compute.manager [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Neutron deleted interface 189f9616-2a3c-4fd7-964c-63b93549ef08; detaching it from the instance and deleting it from the info cache [ 1225.878270] env[61545]: DEBUG nova.network.neutron [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", 
"bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.894575] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256792, 'name': ReconfigVM_Task, 'duration_secs': 0.860704} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.894647] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1225.895530] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9981eb34-cd24-4919-a918-813596fd1d59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.904311] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 1be4da80-c9ee-424e-b4e3-bdd22eb0cd67] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1225.906760] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1225.906760] env[61545]: value = "task-4256795" [ 1225.906760] env[61545]: _type = "Task" [ 1225.906760] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.918029] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256795, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.979616] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.979845] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Acquired lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.981085] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770889a1-19cc-40dc-b43b-d6538e2f4d0d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.001517] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Releasing lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.001825] env[61545]: WARNING nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Detach interface failed, port_id=d2fa1081-8512-43d3-ba49-d739e11278fe, reason: No device with interface-id d2fa1081-8512-43d3-ba49-d739e11278fe exists on VM: nova.exception.NotFound: No device with interface-id d2fa1081-8512-43d3-ba49-d739e11278fe exists on VM [ 1226.002166] env[61545]: DEBUG nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-vif-unplugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1226.002437] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.002685] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.002950] env[61545]: DEBUG oslo_concurrency.lockutils [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.003094] env[61545]: DEBUG nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: 
bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] No waiting events found dispatching network-vif-unplugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1226.003372] env[61545]: WARNING nova.compute.manager [req-b7657e6f-8913-4a78-92d9-e8f9ff830b0d req-8b1ba606-3ac8-48e4-bbab-4bf8cbe0afb0 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received unexpected event network-vif-unplugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 for instance with vm_state shelved and task_state shelving_offloading. [ 1226.079028] env[61545]: DEBUG oslo_vmware.api [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173413} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.079213] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.079856] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.079856] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.084237] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1226.125426] env[61545]: INFO nova.scheduler.client.report [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted allocations for instance bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 [ 1226.334620] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Successfully created port: eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1226.385020] env[61545]: DEBUG oslo_concurrency.lockutils [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.385294] env[61545]: DEBUG oslo_concurrency.lockutils [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Acquired lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.387401] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d1e6a6-ad28-486a-b3c1-b14ac09dd8e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.392698] env[61545]: INFO nova.network.neutron [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Port d2fa1081-8512-43d3-ba49-d739e11278fe from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1226.392909] env[61545]: INFO nova.network.neutron [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Port 189f9616-2a3c-4fd7-964c-63b93549ef08 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1226.393302] env[61545]: DEBUG nova.network.neutron [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [{"id": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "address": "fa:16:3e:c9:9c:15", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc279f08f-d4", "ovs_interfaceid": "c279f08f-d443-4a8b-bd37-296ed181c6a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.408859] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.417033] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114c3330-6ce4-41cd-a54d-4917a6a04c8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.420957] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 9b62358e-c834-461c-9954-49f513b0f4ac] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1226.423602] env[61545]: DEBUG oslo_concurrency.lockutils [None req-71605e2e-74b9-4894-bfa9-3fc013b959e1 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-3b4fd643-c536-4da9-b1a3-82cd74d24f3e-d2fa1081-8512-43d3-ba49-d739e11278fe" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 8.603s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.431025] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256795, 'name': Rename_Task, 'duration_secs': 0.217821} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.450931] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1226.457675] env[61545]: DEBUG nova.virt.vmwareapi.vmops [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfiguring VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1226.460793] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05fad89e-4f8a-4e9b-93aa-75f96177e712 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.467114] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.467114] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceb16e61-af10-4f5d-99e2-27efcb92a93f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.484856] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1226.484856] env[61545]: value = "task-4256797" [ 1226.484856] env[61545]: _type = "Task" [ 1226.484856] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.488162] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Waiting for the task: (returnval){ [ 1226.488162] env[61545]: value = "task-4256798" [ 1226.488162] env[61545]: _type = "Task" [ 1226.488162] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.503010] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.507145] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.600230] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3541d2-36d3-45e5-821e-c740f5339274 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.608116] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b409c7-41b0-4bd2-bd15-9564bc96f5f6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.645703] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.646863] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c089dd-e926-46f0-a500-5585c35f9266 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.656766] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71c150c-234c-4587-8f05-dcc4b29ac371 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.674156] env[61545]: DEBUG nova.compute.provider_tree [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.927342] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 62301196-fb8a-45fe-9193-0ad8f7126ab5] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1227.003512] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256797, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.003728] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.103905] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1227.135447] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1227.135447] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1227.135668] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1227.135755] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1227.135931] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1227.136116] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1227.136338] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1227.136497] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1227.136664] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 
tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1227.136828] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1227.137032] env[61545]: DEBUG nova.virt.hardware [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1227.137922] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f84806-4f7c-44f5-b376-028222ef0450 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.147006] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5251f0e-ff4f-4e7b-8bda-831685a90ea4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.177519] env[61545]: DEBUG nova.scheduler.client.report [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1227.431903] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: e3742aa7-0b26-41f5-b8c0-9388ef2b7e74] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1227.499433] env[61545]: DEBUG nova.compute.manager [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1227.499568] env[61545]: DEBUG nova.compute.manager [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing instance network info cache due to event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1227.499786] env[61545]: DEBUG oslo_concurrency.lockutils [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.499927] env[61545]: DEBUG oslo_concurrency.lockutils [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.500113] env[61545]: DEBUG nova.network.neutron [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.509264] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256797, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.515024] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.683504] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.826s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.686866] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.040s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.686866] env[61545]: DEBUG nova.objects.instance [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'resources' on Instance uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.708439] env[61545]: INFO nova.scheduler.client.report [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Deleted allocations for instance e0ae4965-42eb-4286-8cd9-a5c82426f1bf [ 1227.909635] env[61545]: DEBUG nova.compute.manager [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Received event network-changed-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1227.909635] env[61545]: DEBUG nova.compute.manager [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Refreshing instance network info cache due to event network-changed-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1227.909770] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] Acquiring lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.910347] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] Acquired lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.910347] env[61545]: DEBUG nova.network.neutron [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Refreshing network info cache for port 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.935499] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: dad53420-37f1-42ef-b0d3-e35c73b97417] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1228.002829] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.008058] env[61545]: DEBUG oslo_vmware.api [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256797, 'name': PowerOnVM_Task, 'duration_secs': 1.050088} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.009519] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1228.009519] env[61545]: INFO nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Took 10.19 seconds to spawn the instance on the hypervisor. 
[ 1228.009519] env[61545]: DEBUG nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1228.009885] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28ad7b4-e8bb-4837-a046-0507d3242f68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.191076] env[61545]: DEBUG nova.objects.instance [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'numa_topology' on Instance uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.200323] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Successfully updated port: eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1228.221026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d7841e31-53b9-40ce-a055-a6257c8889b2 tempest-AttachVolumeTestJSON-1443084926 tempest-AttachVolumeTestJSON-1443084926-project-member] Lock "e0ae4965-42eb-4286-8cd9-a5c82426f1bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.573s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.374333] env[61545]: DEBUG nova.network.neutron [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updated VIF entry in instance network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.374719] env[61545]: DEBUG nova.network.neutron [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape06d5cf8-45", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.441565] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 69c59bd5-1f57-4fa2-afab-348e5f57501e] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1228.503827] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.527126] env[61545]: INFO nova.compute.manager [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Took 15.24 seconds to build instance. 
[ 1228.693643] env[61545]: DEBUG nova.objects.base [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1228.703626] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.703813] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.704126] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1228.765595] env[61545]: DEBUG nova.network.neutron [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updated VIF entry in instance network info cache for port 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.766027] env[61545]: DEBUG nova.network.neutron [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [{"id": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "address": "fa:16:3e:b0:54:e7", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd4ffa1-26", "ovs_interfaceid": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.809007] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bcad3c5f-b2da-44ea-bd59-b4806964e627 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.819173] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1944889-50f7-4757-af05-0901cd497385 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.853729] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f177aca6-ba08-4843-95e3-068676894e64 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.862474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ee5566-a5f4-477e-8f45-4b0bba857649 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.877558] env[61545]: DEBUG oslo_concurrency.lockutils [req-6db5a065-88a6-4e7f-a7d6-fc26960cb0e0 req-dc0ed741-dd42-4d32-aae2-e6fe941774fd service nova] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.878217] env[61545]: DEBUG nova.compute.provider_tree [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.944593] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: ced5bde7-07b9-4d07-8b13-49f6fb006eed] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1229.006469] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.029362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9d05f9-4427-4d4f-a39d-9fe518d13aa7 tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.747s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.239147] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1229.268768] env[61545]: DEBUG oslo_concurrency.lockutils [req-a9c362f4-989d-4cd0-9df8-92e493487a98 req-d810f173-7c29-4577-8561-544e23631743 service nova] Releasing lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.381632] env[61545]: DEBUG nova.scheduler.client.report [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1229.447819] env[61545]: DEBUG nova.network.neutron [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Updating instance_info_cache with network_info: [{"id": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "address": "fa:16:3e:dc:9c:63", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb87717b-d9", "ovs_interfaceid": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.449429] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 9cf6dd9e-40e9-4df6-9342-2850e0f93d85] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1229.506959] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.531544] env[61545]: DEBUG nova.compute.manager [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Received event network-vif-plugged-eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1229.531783] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Acquiring lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.531967] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.532200] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.532283] env[61545]: DEBUG nova.compute.manager [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] No waiting events found dispatching network-vif-plugged-eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1229.532449] env[61545]: WARNING nova.compute.manager [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Received unexpected event network-vif-plugged-eb87717b-d938-49ad-b113-77d8ae6cfa5b for instance with vm_state building and task_state spawning. [ 1229.532608] env[61545]: DEBUG nova.compute.manager [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Received event network-changed-eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1229.532760] env[61545]: DEBUG nova.compute.manager [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Refreshing instance network info cache due to event network-changed-eb87717b-d938-49ad-b113-77d8ae6cfa5b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1229.533072] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Acquiring lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.588155] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.887146] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.952721] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.953236] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Instance network_info: |[{"id": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "address": "fa:16:3e:dc:9c:63", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb87717b-d9", "ovs_interfaceid": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1229.953752] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Acquired lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.954142] env[61545]: DEBUG 
nova.network.neutron [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Refreshing network info cache for port eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1229.955419] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: e21de424-8121-4e2f-84c2-8096ba8048cc] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1229.957205] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:9c:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb87717b-d938-49ad-b113-77d8ae6cfa5b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.966589] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1229.967731] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1229.968640] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcbaf965-9e75-4f79-a5c0-4b8420577092 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.002320] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1230.002320] env[61545]: value = "task-4256800" [ 1230.002320] env[61545]: _type = "Task" [ 1230.002320] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.009288] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.016211] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256800, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.402970] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30ff5c18-6e61-4372-b17b-a0d263234c31 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.031s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.405771] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.816s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.405771] env[61545]: INFO nova.compute.manager [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Unshelving [ 1230.468564] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 16bc91d0-71c3-4bd9-980b-6574c3fd9335] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1230.516120] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256800, 'name': CreateVM_Task, 'duration_secs': 0.347525} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.516730] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.516927] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1230.518086] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.518273] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.518601] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1230.518876] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f11731d-c97f-4f41-b241-d462b424f3cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.525201] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1230.525201] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256e1b2-1664-03bb-3cfe-90e9cbdc3c92" [ 1230.525201] env[61545]: _type = "Task" [ 1230.525201] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.535779] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256e1b2-1664-03bb-3cfe-90e9cbdc3c92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.974782] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 5f4d6338-d1af-4e58-9f76-5e95d51e76f7] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1231.013154] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.037639] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5256e1b2-1664-03bb-3cfe-90e9cbdc3c92, 'name': SearchDatastore_Task, 'duration_secs': 0.012386} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.040374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.040692] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1231.041369] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.041577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.041904] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.042939] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e8f173a-361d-45ba-80bb-249a61cd1047 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.053114] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.053577] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1231.054119] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73c04028-77ec-4397-95a5-797a78070748 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.060362] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1231.060362] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e62153-e353-fc3a-d255-7c02da588ddf" [ 1231.060362] env[61545]: _type = "Task" [ 1231.060362] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.070327] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e62153-e353-fc3a-d255-7c02da588ddf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.081985] env[61545]: DEBUG nova.network.neutron [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Updated VIF entry in instance network info cache for port eb87717b-d938-49ad-b113-77d8ae6cfa5b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1231.082379] env[61545]: DEBUG nova.network.neutron [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Updating instance_info_cache with network_info: [{"id": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "address": "fa:16:3e:dc:9c:63", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb87717b-d9", "ovs_interfaceid": "eb87717b-d938-49ad-b113-77d8ae6cfa5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.446217] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.446489] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.446729] env[61545]: DEBUG nova.objects.instance [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'pci_requests' on Instance uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.477446] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 8d838d3b-32ad-4bb2-839e-6bd81c363447] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1231.507737] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.560707] env[61545]: DEBUG nova.compute.manager [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1231.560707] env[61545]: DEBUG nova.compute.manager [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing instance network info cache due to event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1231.560707] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.560707] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.560707] env[61545]: DEBUG nova.network.neutron [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.572551] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e62153-e353-fc3a-d255-7c02da588ddf, 'name': SearchDatastore_Task, 'duration_secs': 0.01029} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.573942] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11b5ea52-4fc9-4a85-b995-51bd5624b6c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.580667] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1231.580667] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c25900-78f9-6493-15ca-32a367a3a993" [ 1231.580667] env[61545]: _type = "Task" [ 1231.580667] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.585576] env[61545]: DEBUG oslo_concurrency.lockutils [req-6a155609-7c55-4824-acfc-637315a45c4b req-c081bd40-3b9f-4943-a1b8-dcacaed54764 service nova] Releasing lock "refresh_cache-a72b998f-68c0-4f31-8051-9b9ced8ff304" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.592470] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c25900-78f9-6493-15ca-32a367a3a993, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.951532] env[61545]: DEBUG nova.objects.instance [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'numa_topology' on Instance uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.981074] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 26e339f1-182b-4f00-b7c2-a2a32e942d04] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1232.008328] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.095159] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c25900-78f9-6493-15ca-32a367a3a993, 'name': SearchDatastore_Task, 'duration_secs': 0.010854} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.095431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.095708] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a72b998f-68c0-4f31-8051-9b9ced8ff304/a72b998f-68c0-4f31-8051-9b9ced8ff304.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1232.096023] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d183ae79-33c2-4013-97f2-d2bbf0d1412f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.103737] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1232.103737] env[61545]: value = "task-4256802" [ 1232.103737] env[61545]: _type = "Task" [ 1232.103737] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.113337] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256802, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.433447] env[61545]: DEBUG nova.network.neutron [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updated VIF entry in instance network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1232.435036] env[61545]: DEBUG nova.network.neutron [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.454725] env[61545]: INFO nova.compute.claims [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1232.485890] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 7f6f2ef7-5ba6-4a80-a155-2fa0c7b627dc] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1232.512143] env[61545]: DEBUG oslo_vmware.api [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Task: {'id': task-4256798, 'name': ReconfigVM_Task, 'duration_secs': 5.772476} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.512957] env[61545]: DEBUG oslo_concurrency.lockutils [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] Releasing lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.513447] env[61545]: DEBUG nova.virt.vmwareapi.vmops [req-45c3a99b-bf35-40bd-bc8b-b41a5035d859 req-cee14a50-0233-4cf4-813e-02bfb13b6543 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Reconfigured VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1232.513778] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.051s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.513987] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.514264] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.514422] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.516761] env[61545]: INFO nova.compute.manager [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Terminating instance [ 1232.616834] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256802, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.937464] env[61545]: DEBUG oslo_concurrency.lockutils [req-8a07654e-45b1-44fd-b4a0-793f0a09abea req-f16f0181-9bd0-4cb3-a1c3-d6a2ebc8dd4a service nova] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.990176] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: d7e25ea6-7076-4ab2-aed6-fe5232c2665d] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1233.021365] env[61545]: DEBUG nova.compute.manager [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1233.021501] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1233.023564] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aea5c6-715f-43ef-9224-76988426f44d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.033097] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1233.033097] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a550ccdc-dc4b-4eb7-b52a-0aaa5d899e7b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.040829] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1233.040829] env[61545]: value = "task-4256803" [ 1233.040829] env[61545]: _type = "Task" [ 1233.040829] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.051155] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256803, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.115371] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572038} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.115709] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] a72b998f-68c0-4f31-8051-9b9ced8ff304/a72b998f-68c0-4f31-8051-9b9ced8ff304.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1233.116012] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1233.116358] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29671341-02a9-4ddc-877e-3e7abc8d409a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.125497] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1233.125497] env[61545]: value = "task-4256804" [ 1233.125497] env[61545]: _type = "Task" [ 1233.125497] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.139789] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256804, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.496256] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 4b29ebc4-d913-447c-bc57-890953cf8d49] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1233.567184] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256803, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.645835] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.370274} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.645835] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1233.646764] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f461a246-db4c-48ff-9d48-b66aa73ccf05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.653936] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972cd054-91c2-4ecb-857d-6cb501afee6e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.674950] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] a72b998f-68c0-4f31-8051-9b9ced8ff304/a72b998f-68c0-4f31-8051-9b9ced8ff304.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1233.675737] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-087fff41-89c1-43cd-afae-19703ea7d1bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.692956] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8acafd-ae74-4957-9c49-2fdd87be2215 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.697967] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1233.697967] env[61545]: value = "task-4256805" [ 1233.697967] env[61545]: _type = "Task" [ 1233.697967] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.726806] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e702d109-73cf-445f-a3d0-771cf6f59b0f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.733297] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256805, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.739468] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12954e63-c6a5-4e5f-b3af-86a43f4f12fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.753752] env[61545]: DEBUG nova.compute.provider_tree [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.000213] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 844f01ed-4dae-4e13-9d1c-09a73f413201] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1234.056250] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256803, 'name': PowerOffVM_Task, 'duration_secs': 0.596302} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.056250] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1234.056250] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1234.056250] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3373c866-28ae-4fe8-9147-ff727961a1be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.214022] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256805, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.259048] env[61545]: DEBUG nova.scheduler.client.report [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1234.287570] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1234.287570] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1234.287570] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleting the datastore file [datastore2] 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1234.287570] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6094bdd2-54e6-4192-8179-2a10275f7b26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.296083] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1234.296083] env[61545]: value = "task-4256807" [ 1234.296083] env[61545]: _type = "Task" [ 1234.296083] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.306439] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256807, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.505758] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: eced4107-b99e-479e-b22c-2157320ecf95] Instance has had 0 of 5 cleanup attempts {{(pid=61545) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1234.682728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.682728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.711280] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256805, 'name': ReconfigVM_Task, 'duration_secs': 0.728742} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.711595] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Reconfigured VM instance instance-0000006d to attach disk [datastore2] a72b998f-68c0-4f31-8051-9b9ced8ff304/a72b998f-68c0-4f31-8051-9b9ced8ff304.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.712278] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e4fdfe5-4e58-49bb-a1d0-7d00f056796b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.720349] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1234.720349] env[61545]: value = "task-4256808" [ 1234.720349] env[61545]: _type = "Task" [ 1234.720349] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.729877] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256808, 'name': Rename_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.764824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.318s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.808688] env[61545]: DEBUG oslo_vmware.api [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.500376} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.809031] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1234.809696] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1234.809696] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1234.809696] env[61545]: INFO nova.compute.manager [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Took 1.79 seconds to destroy the instance on the hypervisor. [ 1234.809969] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1234.810229] env[61545]: DEBUG nova.compute.manager [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1234.810280] env[61545]: DEBUG nova.network.neutron [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1234.812803] env[61545]: INFO nova.network.neutron [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating port e06d5cf8-450e-488f-8ba6-9e7d62811ba1 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1235.013567] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.013845] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.015270] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.185707] env[61545]: INFO nova.compute.manager [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Detaching volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 [ 1235.226030] env[61545]: INFO nova.virt.block_device [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Attempting to driver detach volume f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75 from mountpoint /dev/sdb [ 1235.226274] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1235.226456] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1235.227303] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b951053a-4f86-405a-8d90-2c51a4d34ebd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.236775] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256808, 'name': Rename_Task, 'duration_secs': 0.178447} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.255709] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1235.256141] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9587606-9546-4641-bdc8-484397950b02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.258380] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51189e22-6350-4090-9385-ba931f8ac77f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.267820] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d023e7ae-93b8-48c0-9c49-d0cb87f9553b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.271016] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1235.271016] env[61545]: value = "task-4256810" [ 1235.271016] env[61545]: _type = "Task" [ 1235.271016] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.292943] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d242921-3c7f-42ee-9cb8-775a68fff753 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.299487] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256810, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.313605] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] The volume has not been displaced from its original location: [datastore1] volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75/volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75.vmdk. No consolidation needed. {{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1235.319115] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1235.319510] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be0ae26a-5a96-4bc6-a6f3-6eb311480aaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.338695] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1235.338695] env[61545]: value = "task-4256811" [ 1235.338695] env[61545]: _type = "Task" [ 1235.338695] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.347476] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256811, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.520487] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1235.523941] env[61545]: DEBUG nova.compute.manager [req-269cbe6a-1586-4e56-9af9-349706377820 req-112b4ddb-6b17-4936-8c7e-e589dc260295 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Received event network-vif-deleted-c279f08f-d443-4a8b-bd37-296ed181c6a7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1235.524204] env[61545]: INFO nova.compute.manager [req-269cbe6a-1586-4e56-9af9-349706377820 req-112b4ddb-6b17-4936-8c7e-e589dc260295 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Neutron deleted interface c279f08f-d443-4a8b-bd37-296ed181c6a7; detaching it from the instance and deleting it from the info cache [ 1235.524427] env[61545]: DEBUG nova.network.neutron [req-269cbe6a-1586-4e56-9af9-349706377820 req-112b4ddb-6b17-4936-8c7e-e589dc260295 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.782271] env[61545]: DEBUG oslo_vmware.api [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256810, 'name': PowerOnVM_Task, 'duration_secs': 0.506462} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.782555] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1235.783762] env[61545]: INFO nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Took 8.68 seconds to spawn the instance on the hypervisor. [ 1235.784023] env[61545]: DEBUG nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1235.785350] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb0b195-acd8-4440-a3a4-c8d35f79997e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.851760] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256811, 'name': ReconfigVM_Task, 'duration_secs': 0.321728} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.852084] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1235.857733] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abb811e5-e5ee-49f9-9f28-852ffdd85d5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.878190] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1235.878190] env[61545]: value = "task-4256812" [ 1235.878190] env[61545]: _type = "Task" [ 1235.878190] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.889478] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.003583] env[61545]: DEBUG nova.network.neutron [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.006186] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1236.035177] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5767e3a-6765-418b-8ad1-1720d6578eac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.046351] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64c034-9dcf-4623-a4d9-b4c77b48663c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.058861] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.059304] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.060776] env[61545]: INFO nova.compute.claims [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.085392] env[61545]: DEBUG nova.compute.manager [req-269cbe6a-1586-4e56-9af9-349706377820 req-112b4ddb-6b17-4936-8c7e-e589dc260295 service nova] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Detach interface failed, port_id=c279f08f-d443-4a8b-bd37-296ed181c6a7, reason: Instance 3b4fd643-c536-4da9-b1a3-82cd74d24f3e could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1236.304593] env[61545]: INFO nova.compute.manager [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Took 13.63 seconds to build instance. [ 1236.389570] env[61545]: DEBUG oslo_vmware.api [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256812, 'name': ReconfigVM_Task, 'duration_secs': 0.161626} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.389877] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838830', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'name': 'volume-f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478', 'attached_at': '', 'detached_at': '', 'volume_id': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75', 'serial': 'f20d1d3b-c7a7-4305-84d5-1ae60bfa7b75'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1236.507374] env[61545]: INFO nova.compute.manager [-] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] Took 1.70 seconds to deallocate network for instance. 
[ 1236.519977] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1236.573433] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.573433] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.573433] env[61545]: DEBUG nova.network.neutron [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1236.806972] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0482bbe0-3b0a-4f04-873a-8e56b9f064ba tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.136s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.936454] env[61545]: DEBUG nova.objects.instance [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'flavor' on Instance uuid c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.016445] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.028950] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Getting list of instances from cluster (obj){ [ 1237.028950] env[61545]: value = "domain-c8" [ 1237.028950] env[61545]: _type = "ClusterComputeResource" [ 1237.028950] env[61545]: } {{(pid=61545) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1237.028950] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73301ab1-a136-40ca-bf8b-ae0f33661b3f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.048198] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Got total of 7 instances {{(pid=61545) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1237.048388] env[61545]: WARNING nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] While synchronizing instance power states, found 10 instances in the database and 7 instances on the hypervisor. [ 1237.048654] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid d0f42893-3332-4027-93df-bb46e3350485 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.048758] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.048941] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049155] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049329] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid 5393730d-6a4b-418a-9047-4287f87c8d14 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049482] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid 9a59f45b-727f-45ea-ad33-64fa23aaffe7 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049630] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049798] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.049988] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid a72b998f-68c0-4f31-8051-9b9ced8ff304 {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.050164] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Triggering sync for uuid c7b7f132-d863-4271-94be-a3c0aaed43fa {{(pid=61545) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1237.050533] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "d0f42893-3332-4027-93df-bb46e3350485" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.050789] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "d0f42893-3332-4027-93df-bb46e3350485" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.051099] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.051328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.051548] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.051801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.051988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.052246] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.052428] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.052656] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.052835] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 1237.053101] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.053286] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.053511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.053685] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.053907] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.054852] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5d8fc5-0ca4-4788-aae2-0b24abd722cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.058404] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4c477d-2345-4969-8845-706b82b0af83 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.061343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdae222-7723-4112-9fca-e63f703b2071 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.064764] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2767e4-b7df-4f83-b443-8a6aaf623394 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.067385] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7c23a8-ba0f-4af8-9439-f6b1be1e34a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.070221] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb539082-73f7-47a6-8a4a-fa4089923e77 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.240904] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f885ecb7-8711-4c03-9a7c-680dd08bb46d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.249147] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae546c73-d447-4bc9-aefd-3639ce706f62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.281806] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7824c0-5f33-4f3b-8d50-97f5c31e7a03 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.290323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090c0a11-62dc-4db8-81a8-7c268e0d90b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.310539] env[61545]: DEBUG nova.compute.provider_tree [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.430819] env[61545]: DEBUG nova.network.neutron [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.547824] env[61545]: DEBUG nova.compute.manager [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11768}} [ 1237.548329] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.548329] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.548482] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.548655] env[61545]: DEBUG nova.compute.manager [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] No waiting events found dispatching network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1237.548819] env[61545]: WARNING nova.compute.manager [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received unexpected event network-vif-plugged-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 for instance with vm_state shelved_offloaded and task_state spawning. [ 1237.548999] env[61545]: DEBUG nova.compute.manager [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1237.549351] env[61545]: DEBUG nova.compute.manager [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing instance network info cache due to event network-changed-e06d5cf8-450e-488f-8ba6-9e7d62811ba1. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1237.549527] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Acquiring lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.591052] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "d0f42893-3332-4027-93df-bb46e3350485" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.600838] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.604794] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.607302] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.607599] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.614579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.814032] env[61545]: DEBUG nova.scheduler.client.report [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
1237.843277] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.843739] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.844123] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.844433] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.844718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.847669] env[61545]: INFO nova.compute.manager [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Terminating instance [ 1237.933644] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.936153] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Acquired lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.936428] env[61545]: DEBUG nova.network.neutron [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Refreshing network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1237.944828] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2a423da0-b131-4888-ba92-bfb9b0a18c1e tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.262s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.945906] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.895s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.947420] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f69b53-01a0-4eb7-b712-06dfc4ba0837 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.968186] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9fd2c45168e4a0a5335089d485b4a233',container_format='bare',created_at=2025-06-03T12:56:01Z,direct_url=,disk_format='vmdk',id=30fdee25-4339-4402-9519-ec7f05638745,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-344817171-shelved',owner='50b8a600a38442278d0cf036919f87c2',properties=ImageMetaProps,protected=,size=31663616,status='active',tags=,updated_at=2025-06-03T12:56:17Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1237.968438] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.968631] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1237.968805] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.968957] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1237.969131] 
env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1237.969343] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1237.969506] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1237.969674] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1237.969964] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1237.970042] env[61545]: DEBUG nova.virt.hardware [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1237.971242] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e79a17-966e-42fb-887a-57b9e33e60cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.981325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a787c6a-e388-4697-b0ad-ffc9344e294d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.996127] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:70:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e06d5cf8-450e-488f-8ba6-9e7d62811ba1', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.003608] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d6c4403-1587-4897-af76-d0f666e78244 
tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.003868] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1238.004097] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa4046c8-964c-42ae-b508-95fab5d8dc35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.023725] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.023725] env[61545]: value = "task-4256813" [ 1238.023725] env[61545]: _type = "Task" [ 1238.023725] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.033497] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256813, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.152408] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.152808] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.320848] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.321469] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1238.324395] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.308s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.324629] env[61545]: DEBUG nova.objects.instance [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'resources' on Instance uuid 3b4fd643-c536-4da9-b1a3-82cd74d24f3e {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.354035] env[61545]: DEBUG nova.compute.manager [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1238.354349] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1238.355831] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e7fd03-12fa-4f7b-bf2f-fd4557ba0e90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.365224] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1238.365531] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fecd84d8-0ece-48b2-92ab-33df62e7c66a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.373060] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1238.373060] env[61545]: value = "task-4256814" [ 1238.373060] env[61545]: _type = "Task" [ 1238.373060] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.383662] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256814, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.461824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.516s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.536788] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256813, 'name': CreateVM_Task, 'duration_secs': 0.321959} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.537049] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1238.537977] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.538220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.538923] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1238.539255] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6dea996-29e2-475c-aa98-a0a08a2664f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.547712] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1238.547712] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af8892-b457-c4f2-a3e7-bb060ec3296e" [ 1238.547712] env[61545]: _type = "Task" [ 1238.547712] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.558271] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af8892-b457-c4f2-a3e7-bb060ec3296e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.657218] env[61545]: DEBUG nova.compute.utils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.792093] env[61545]: DEBUG nova.network.neutron [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updated VIF entry in instance network info cache for port e06d5cf8-450e-488f-8ba6-9e7d62811ba1. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1238.793176] env[61545]: DEBUG nova.network.neutron [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [{"id": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "address": "fa:16:3e:bf:70:d6", "network": {"id": "0b8ddbd4-efdc-4e56-80f3-3ea92efeef6a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1569900370-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50b8a600a38442278d0cf036919f87c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape06d5cf8-45", "ovs_interfaceid": "e06d5cf8-450e-488f-8ba6-9e7d62811ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.835287] env[61545]: DEBUG nova.compute.utils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.837548] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1238.837548] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1238.884358] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256814, 'name': PowerOffVM_Task, 'duration_secs': 0.198804} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.884358] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1238.884358] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1238.884358] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fd9af7a-beac-4d64-b581-0f182250f480 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.918870] env[61545]: DEBUG nova.policy [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4333754ae4a4e26bab98dfe1853e667', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b64f16b672ff471ba1d48aa2490b9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1238.957401] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1238.957612] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1238.957795] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] a72b998f-68c0-4f31-8051-9b9ced8ff304 {{(pid=61545) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.958388] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18ab78b7-3fc5-4c0e-972f-d72b28be037d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.969441] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1238.969441] env[61545]: value = "task-4256816" [ 1238.969441] env[61545]: _type = "Task" [ 1238.969441] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.978187] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.981470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.981814] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.982118] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.982396] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.982651] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.985562] env[61545]: INFO nova.compute.manager [None 
req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Terminating instance [ 1239.028949] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc87e40c-d6d6-49b2-a66e-cadc6b38004e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.037853] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8c3f3a-13b6-4eeb-a10e-745be8aa634d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.077529] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60701187-c490-41e9-a258-685b65202913 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.088734] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6e6eba-8420-445b-a7f7-7a3d9d1767bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.092839] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.093115] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Processing image 30fdee25-4339-4402-9519-ec7f05638745 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1239.093354] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.093500] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquired lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.093676] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.094348] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4d0d558-ef30-478b-a9a7-ed2f2f181324 {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.104112] env[61545]: DEBUG nova.compute.provider_tree [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.127085] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.127319] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1239.128166] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e400d811-3a87-4b5c-af39-a3a388e1b67d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.136681] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1239.136681] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5250557a-0b99-abc6-cded-61a4c2580053" [ 1239.136681] env[61545]: _type = "Task" [ 1239.136681] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.145542] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5250557a-0b99-abc6-cded-61a4c2580053, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.159926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.295768] env[61545]: DEBUG oslo_concurrency.lockutils [req-95b6ace1-d6e5-4790-adc0-7f6c5917363e req-5fc7792e-5b33-46f7-987e-db79ee696151 service nova] Releasing lock "refresh_cache-bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.300256] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Successfully created port: 1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1239.341504] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1239.483165] env[61545]: DEBUG oslo_vmware.api [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359041} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.483444] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1239.483674] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1239.483817] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1239.483991] env[61545]: INFO nova.compute.manager [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1239.484320] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1239.484519] env[61545]: DEBUG nova.compute.manager [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1239.484614] env[61545]: DEBUG nova.network.neutron [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1239.489395] env[61545]: DEBUG nova.compute.manager [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1239.489591] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1239.490383] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5f811f-43b1-4dcc-bfd6-b53749462d6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.498121] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1239.498445] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6583fb9-9468-489a-b67f-019500d76511 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.507181] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1239.507181] env[61545]: value = "task-4256817" [ 1239.507181] env[61545]: _type = "Task" [ 1239.507181] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.521078] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256817, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.607989] env[61545]: DEBUG nova.scheduler.client.report [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1239.650064] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1239.650569] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Fetch image to [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667/OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1239.651205] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Downloading stream optimized image 30fdee25-4339-4402-9519-ec7f05638745 to [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667/OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667.vmdk on the data store datastore2 as vApp {{(pid=61545) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1239.651205] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Downloading image file data 30fdee25-4339-4402-9519-ec7f05638745 to the ESX as VM named 'OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667' {{(pid=61545) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1239.777188] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1239.777188] env[61545]: value = "resgroup-9" [ 1239.777188] env[61545]: _type = "ResourcePool" [ 1239.777188] env[61545]: }. 
{{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1239.777188] env[61545]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2c25fad9-a7ab-483d-a3e9-81f19d12218a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.801827] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease: (returnval){ [ 1239.801827] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1239.801827] env[61545]: _type = "HttpNfcLease" [ 1239.801827] env[61545]: } obtained for vApp import into resource pool (val){ [ 1239.801827] env[61545]: value = "resgroup-9" [ 1239.801827] env[61545]: _type = "ResourcePool" [ 1239.801827] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1239.802150] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the lease: (returnval){ [ 1239.802150] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1239.802150] env[61545]: _type = "HttpNfcLease" [ 1239.802150] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1239.811036] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1239.811036] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1239.811036] env[61545]: _type = "HttpNfcLease" [ 1239.811036] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1239.966254] env[61545]: DEBUG nova.compute.manager [req-7d45215c-0c5e-4115-847d-f2d1ad61dc83 req-456e166a-8af5-411f-ba10-36cea6a5ce16 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Received event network-vif-deleted-eb87717b-d938-49ad-b113-77d8ae6cfa5b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1239.966254] env[61545]: INFO nova.compute.manager [req-7d45215c-0c5e-4115-847d-f2d1ad61dc83 req-456e166a-8af5-411f-ba10-36cea6a5ce16 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Neutron deleted interface eb87717b-d938-49ad-b113-77d8ae6cfa5b; detaching it from the instance and deleting it from the info cache [ 1239.966254] env[61545]: DEBUG nova.network.neutron [req-7d45215c-0c5e-4115-847d-f2d1ad61dc83 req-456e166a-8af5-411f-ba10-36cea6a5ce16 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.018634] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256817, 'name': PowerOffVM_Task, 'duration_secs': 0.347726} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.019070] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1240.019293] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1240.019592] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bb8c053-5a62-435e-a3f9-1d3304297b01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.094843] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1240.094843] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1240.094843] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1240.094843] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7c28e0f-ad69-4f29-807e-55b235b59df2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.102522] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1240.102522] env[61545]: value = "task-4256820" [ 1240.102522] env[61545]: _type = "Task" [ 1240.102522] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.113307] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.117713] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.793s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.144091] env[61545]: INFO nova.scheduler.client.report [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted allocations for instance 3b4fd643-c536-4da9-b1a3-82cd74d24f3e [ 1240.295150] env[61545]: DEBUG nova.network.neutron [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.313471] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1240.313471] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1240.313471] env[61545]: _type = "HttpNfcLease" [ 1240.313471] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1240.356431] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1240.385993] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1240.386339] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.386512] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1240.386714] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.386875] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.387051] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1240.387281] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1240.387440] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1240.387631] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1240.387813] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1240.387990] env[61545]: DEBUG nova.virt.hardware [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1240.388943] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1aae970-2c77-41be-8c07-8e869c598c90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.398119] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a71c4c-89d5-4d2c-a6d8-46131d9bc828 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.423273] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock 
"5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.423599] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.423856] env[61545]: INFO nova.compute.manager [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Attaching volume 44c6fe87-2763-45ca-bf43-c6ba0476f9ae to /dev/sdb [ 1240.463256] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f9d815-9e02-40b6-b029-e4187d96e015 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.472186] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f449a25-d32c-4d11-936e-e3bdb15a6dc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.475171] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d8aca2-058d-4836-8f59-c552c8a2c74b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.485714] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f35cfa3-758a-4c7e-a439-c342568470b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.503030] env[61545]: DEBUG nova.virt.block_device [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updating existing volume attachment record: 4c03bfb8-b153-496e-a9bc-20158e6935b1 {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1240.527690] env[61545]: DEBUG nova.compute.manager [req-7d45215c-0c5e-4115-847d-f2d1ad61dc83 req-456e166a-8af5-411f-ba10-36cea6a5ce16 service nova] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Detach interface failed, port_id=eb87717b-d938-49ad-b113-77d8ae6cfa5b, reason: Instance a72b998f-68c0-4f31-8051-9b9ced8ff304 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1240.612533] env[61545]: DEBUG oslo_vmware.api [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18841} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.612812] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1240.613006] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1240.613195] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1240.613370] env[61545]: INFO nova.compute.manager [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1240.613608] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1240.613801] env[61545]: DEBUG nova.compute.manager [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1240.613897] env[61545]: DEBUG nova.network.neutron [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1240.655929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-85e4d083-ce61-4b5b-a5f3-c66fcd69242f tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.142s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.656942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.605s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.657163] env[61545]: INFO nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 3b4fd643-c536-4da9-b1a3-82cd74d24f3e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1240.657343] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "3b4fd643-c536-4da9-b1a3-82cd74d24f3e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.798217] env[61545]: INFO nova.compute.manager [-] [instance: a72b998f-68c0-4f31-8051-9b9ced8ff304] Took 1.31 seconds to deallocate network for instance. [ 1240.812545] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1240.812545] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1240.812545] env[61545]: _type = "HttpNfcLease" [ 1240.812545] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1240.812917] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1240.812917] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218bfcf-4c9e-d623-635e-6af4641f8b74" [ 1240.812917] env[61545]: _type = "HttpNfcLease" [ 1240.812917] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1240.818141] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca7743c-ef67-49a3-83f3-b65056cb5400 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.829568] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1240.829755] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating HTTP connection to write to file with size = 31663616 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk. 
{{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1240.901229] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-31226809-051b-45aa-9e03-053f13fb76e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.290322] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Successfully updated port: 1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1241.310553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.310553] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.310553] env[61545]: DEBUG nova.objects.instance [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid a72b998f-68c0-4f31-8051-9b9ced8ff304 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.795109] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.795332] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.795509] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1241.981370] env[61545]: DEBUG nova.network.neutron [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.007647] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e10aa0-e33f-44cc-b1f0-3d2144fbb630 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.019039] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Received event network-vif-plugged-1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1242.019317] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Acquiring lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.019507] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.019836] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.019836] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] No waiting events found dispatching network-vif-plugged-1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1242.020029] env[61545]: WARNING nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Received unexpected event network-vif-plugged-1df49c85-dd94-41c4-9c7d-d7df50f0288d for instance with vm_state building and task_state spawning. [ 1242.020685] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Received event network-changed-1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1242.020685] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Refreshing instance network info cache due to event network-changed-1df49c85-dd94-41c4-9c7d-d7df50f0288d. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1242.020685] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Acquiring lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.025896] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bd5861-32e3-4a25-b298-628114fa7e59 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.067267] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ea99b5-1ba1-40ce-bbef-539620e65391 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.080707] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Completed reading data from the image iterator. {{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1242.080932] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1242.082443] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d51c23-4cb3-45e0-aff0-56a31b78cc3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.087972] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ab6683-7e69-4d85-8aef-9174029be14a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.103290] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1242.106420] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk is in state: ready. 
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1242.106420] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1242.106420] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d1329ff5-34d8-4699-bc61-1c2ce379f1f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.316622] env[61545]: DEBUG oslo_vmware.rw_handles [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a5786b-72a9-65e5-7592-a2a1fd7f5c6b/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1242.316830] env[61545]: INFO nova.virt.vmwareapi.images [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Downloaded image file data 30fdee25-4339-4402-9519-ec7f05638745 [ 1242.317705] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c43b354-0219-4d33-89b1-4ecf80c382d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.336951] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32346a8f-d66b-403b-8e54-46dd99dec0ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.343910] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1242.380967] env[61545]: INFO nova.virt.vmwareapi.images [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] The imported VM was unregistered [ 1242.383447] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Caching image {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1242.383704] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Creating directory with path [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1242.383998] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-636ba1f3-8847-4ed2-96b4-6b8b59a58675 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.397906] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Created directory with path [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745 {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1242.397906] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667/OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667.vmdk to [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk. {{(pid=61545) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1242.398120] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-24074448-4f03-4533-8ae5-469d82715e3f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.405885] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1242.405885] env[61545]: value = "task-4256823" [ 1242.405885] env[61545]: _type = "Task" [ 1242.405885] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.413547] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.490127] env[61545]: INFO nova.compute.manager [-] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Took 1.87 seconds to deallocate network for instance. [ 1242.545989] env[61545]: DEBUG nova.network.neutron [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Updating instance_info_cache with network_info: [{"id": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "address": "fa:16:3e:c3:ef:e6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df49c85-dd", "ovs_interfaceid": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.630722] env[61545]: ERROR nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [req-46110ebd-aa8e-47c9-8389-5fef7f38cd39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-46110ebd-aa8e-47c9-8389-5fef7f38cd39"}]} [ 1242.655635] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1242.675759] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1242.676280] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1242.692445] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1242.724557] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1242.882824] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea37951-f96c-42a0-b5fb-5e7d644dc954 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.894533] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e507d5-c12f-4ea7-84d9-5b1734cea31c {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.937919] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7158e9-74dc-4106-9d0c-423bb63ae6f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.957387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fc362f-ac5c-4e69-8984-c833d57c7921 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.965803] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.984105] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1242.997742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.049289] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.050034] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance network_info: |[{"id": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "address": "fa:16:3e:c3:ef:e6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df49c85-dd", "ovs_interfaceid": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1243.050034] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Acquired lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.050272] env[61545]: DEBUG nova.network.neutron [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Refreshing network info cache for port 1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1243.051838] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:ef:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1df49c85-dd94-41c4-9c7d-d7df50f0288d', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.059781] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating folder: Project (b64f16b672ff471ba1d48aa2490b9829). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.063775] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0fbcd2c8-0198-4f1a-b3c2-ef5b92a85c7d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.084610] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created folder: Project (b64f16b672ff471ba1d48aa2490b9829) in parent group-v838542. [ 1243.084610] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating folder: Instances. Parent ref: group-v838848. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1243.084610] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb9d8d74-194e-4e0a-b231-c7da67edfe3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.096700] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created folder: Instances in parent group-v838848. [ 1243.097481] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1243.097481] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.097623] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80aa63fe-0ea9-4901-8219-d3a2f0ecfdf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.126258] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1243.126258] env[61545]: value = "task-4256827" [ 1243.126258] env[61545]: _type = "Task" [ 1243.126258] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.139096] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256827, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.382926] env[61545]: DEBUG nova.network.neutron [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Updated VIF entry in instance network info cache for port 1df49c85-dd94-41c4-9c7d-d7df50f0288d. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1243.383070] env[61545]: DEBUG nova.network.neutron [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Updating instance_info_cache with network_info: [{"id": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "address": "fa:16:3e:c3:ef:e6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1df49c85-dd", "ovs_interfaceid": "1df49c85-dd94-41c4-9c7d-d7df50f0288d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.445038] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.507876] env[61545]: ERROR nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [req-6a7df439-b84d-4f5a-bddd-0370e84b34cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6a7df439-b84d-4f5a-bddd-0370e84b34cf"}]} [ 1243.527593] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1243.544099] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1243.544441] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.558397] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1243.583318] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1243.644497] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256827, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.715478] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.715724] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.755531] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b551fbf-21b5-4b71-b0f9-c2d46720267a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.767614] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ab9048-f9dd-4ca7-9d74-0a011d25bddd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.804863] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2ea70d-c4d8-4e29-ac43-0feb2055432b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.815687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d10c2f-9077-435f-84db-2619209cdffb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.834554] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.886523] env[61545]: DEBUG oslo_concurrency.lockutils [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] Releasing lock "refresh_cache-c7b7f132-d863-4271-94be-a3c0aaed43fa" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.886945] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Received event network-vif-deleted-a89c03e7-6504-4eca-9dc3-110100bbf69c {{(pid=61545) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11768}} [ 1243.887189] env[61545]: INFO nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Neutron deleted interface a89c03e7-6504-4eca-9dc3-110100bbf69c; detaching it from the instance and deleting it from the info cache [ 1243.887443] env[61545]: DEBUG nova.network.neutron [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.943385] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.140061] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256827, 'name': CreateVM_Task, 'duration_secs': 0.943762} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.140061] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1244.140856] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.141056] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.141380] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1244.141659] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15fcb0af-e738-43aa-a6f9-208745e8d8cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.148888] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1244.148888] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d5dee-ade3-4489-049a-3ceb156e3e56" [ 1244.148888] env[61545]: _type = "Task" [ 1244.148888] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.160959] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d5dee-ade3-4489-049a-3ceb156e3e56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.219171] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1244.370028] env[61545]: DEBUG nova.scheduler.client.report [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1244.370293] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 159 to 160 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1244.370507] env[61545]: DEBUG nova.compute.provider_tree [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1244.390788] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af35eeb0-f2c5-40ee-b0db-b170608c47d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.405175] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cde139-7cd8-4785-9172-832296c863e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.437861] env[61545]: DEBUG nova.compute.manager [req-fdf760c3-ae95-4cd7-868c-b2daf368b119 req-a1d81405-be40-4570-b49c-5612e3109ffa service nova] [instance: c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478] 
Detach interface failed, port_id=a89c03e7-6504-4eca-9dc3-110100bbf69c, reason: Instance c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1244.449184] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.662580] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]522d5dee-ade3-4489-049a-3ceb156e3e56, 'name': SearchDatastore_Task, 'duration_secs': 0.082449} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.662745] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.662960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1244.664076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.664076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.664076] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.664343] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57b69c26-8e4a-4245-a36d-7819efe84225 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.684909] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 
tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.685225] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1244.686108] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2424c65d-c75f-4115-9356-a15cbcd2e6f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.695840] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1244.695840] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288e0b3-ac27-767b-3504-aec07b0b048e" [ 1244.695840] env[61545]: _type = "Task" [ 1244.695840] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.706239] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288e0b3-ac27-767b-3504-aec07b0b048e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.741426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.875716] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.566s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.880032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.881s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.880032] env[61545]: DEBUG nova.objects.instance [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'resources' on Instance uuid c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.899299] env[61545]: INFO nova.scheduler.client.report [None 
req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance a72b998f-68c0-4f31-8051-9b9ced8ff304 [ 1244.947616] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256823, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.456686} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.947882] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667/OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667.vmdk to [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk. [ 1244.948086] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Cleaning up location [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1244.948254] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_00722a29-0908-4a82-99be-cf92626f4667 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.948509] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6443c2a-d40d-45a2-8679-2c1cfbe1960c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.954947] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1244.954947] env[61545]: value = "task-4256828" [ 1244.954947] env[61545]: _type = "Task" [ 1244.954947] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.963800] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.060461] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1245.060729] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838847', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'name': 'volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5393730d-6a4b-418a-9047-4287f87c8d14', 'attached_at': '', 'detached_at': '', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'serial': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1245.061656] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74f0d07-8656-4bf0-8753-53a48dcf36de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.077930] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef325ce1-a7b2-4026-9cad-ec118bc6154b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.102659] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae/volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.102957] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf0bbb0a-83fd-4759-937e-e6537936d9c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.120237] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1245.120237] env[61545]: value = "task-4256829" [ 1245.120237] env[61545]: _type = "Task" [ 1245.120237] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.128837] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256829, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.205836] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5288e0b3-ac27-767b-3504-aec07b0b048e, 'name': SearchDatastore_Task, 'duration_secs': 0.087935} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.206651] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ded5bee0-772c-4ca5-9b06-7cf18ef1718b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.211832] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1245.211832] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291f08f-513e-c184-0392-dcb60ec437e9" [ 1245.211832] env[61545]: _type = "Task" [ 1245.211832] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.220151] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291f08f-513e-c184-0392-dcb60ec437e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.407196] env[61545]: DEBUG oslo_concurrency.lockutils [None req-42c7a004-aa6b-46e7-a642-fd297ffc8294 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "a72b998f-68c0-4f31-8051-9b9ced8ff304" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.563s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.465306] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033359} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.467748] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.467919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Releasing lock "[datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.468205] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk to [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.468651] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92dc60fa-62ab-4753-9b71-292bfd604709 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.475712] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1245.475712] env[61545]: value = "task-4256830" [ 1245.475712] env[61545]: _type = "Task" [ 1245.475712] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.487217] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.541015] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1076ad2b-9aff-4cac-8b2c-c94b01edaffd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.550119] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5e18d7-f725-4f97-b632-833815c1173d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.584057] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceaf43d9-44a9-4e20-af05-11f308f9d2cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.592576] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda637a8-8bd4-4fca-950d-e1b79932d4a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.606866] env[61545]: DEBUG nova.compute.provider_tree [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.632176] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256829, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.727935] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5291f08f-513e-c184-0392-dcb60ec437e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009619} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.763177] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.763177] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1245.763177] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7fc9faf-41d9-4894-8bf7-2d07b92181d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.763177] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1245.763177] env[61545]: value = "task-4256831" [ 1245.763177] env[61545]: _type = "Task" [ 1245.763177] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.763177] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.991717] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.111338] env[61545]: DEBUG nova.scheduler.client.report [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.134017] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256829, 'name': ReconfigVM_Task, 'duration_secs': 0.627978} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.134392] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae/volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.139713] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68341073-1fb9-4b17-ad92-95bb8fda2d01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.158654] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1246.158654] env[61545]: value = "task-4256832" [ 1246.158654] env[61545]: _type = "Task" [ 1246.158654] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.171825] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256832, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.250552] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.490391] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.532806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "f1243572-2768-4e0f-b3e7-26aa4554d987" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.533135] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.615785] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.618473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.877s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.620581] env[61545]: INFO nova.compute.claims [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.633873] env[61545]: INFO nova.scheduler.client.report [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocations for instance c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478 [ 1246.673827] env[61545]: DEBUG oslo_vmware.api [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256832, 'name': ReconfigVM_Task, 'duration_secs': 0.266008} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.674193] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838847', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'name': 'volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5393730d-6a4b-418a-9047-4287f87c8d14', 'attached_at': '', 'detached_at': '', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'serial': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1246.752086] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.990205] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.036043] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1247.142879] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f913548-3330-4079-8926-8ea510fa6a37 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "c5ded6c4-b8ad-4c4d-8b4a-a548b86d7478" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.161s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.253019] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.490783] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.563717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.756695] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.772349] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aee335-d572-4c48-bd02-67c2139ea0c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.783205] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6696d779-65ef-4bde-880b-55893a0ac1e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.825536] env[61545]: DEBUG nova.objects.instance [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid 5393730d-6a4b-418a-9047-4287f87c8d14 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.829058] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e21285-c876-4734-8f1d-a0661b55aeac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.841384] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4e1aa2-9011-498d-ac9d-cb3b6c345a43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.861280] env[61545]: DEBUG nova.compute.provider_tree [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.990940] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.252130] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256831, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.462484} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.252396] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.252614] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1248.252869] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-609f908f-72c6-4bf6-b9a3-368187fc8b73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.259481] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1248.259481] env[61545]: value = "task-4256833" [ 1248.259481] env[61545]: _type = "Task" [ 1248.259481] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.268276] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.332027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-46a835b8-7abe-487d-9a5f-49980aa5008c tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.908s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.364807] env[61545]: DEBUG nova.scheduler.client.report [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.491251] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256830, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.708846} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.491730] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/30fdee25-4339-4402-9519-ec7f05638745/30fdee25-4339-4402-9519-ec7f05638745.vmdk to [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1248.492885] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e23dd15-2020-42b4-8192-9c3fae3af5e7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.521340] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.521660] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53177843-f8e1-4fb1-bcef-82bc7786a2a9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.544883] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] 
Waiting for the task: (returnval){ [ 1248.544883] env[61545]: value = "task-4256834" [ 1248.544883] env[61545]: _type = "Task" [ 1248.544883] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.554233] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256834, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.769942] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066232} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.770278] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1248.771159] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e734a288-7b68-4d1c-98b3-441188d7b4ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.795225] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.795490] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8d68396-f3fc-449a-874d-ce2271da0a89 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.819135] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1248.819135] env[61545]: value = "task-4256835" [ 1248.819135] env[61545]: _type = "Task" [ 1248.819135] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.830925] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256835, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.870913] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.871640] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1248.875081] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.311s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.876289] env[61545]: INFO nova.compute.claims [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.041736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.041736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.056282] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256834, 'name': ReconfigVM_Task, 'duration_secs': 0.318841} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.056587] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Reconfigured VM instance instance-00000066 to attach disk [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2/bd259162-c8ea-4408-9b7c-c91b9fbfc0d2.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.057486] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-149d6e6c-50f8-4810-a6ac-64d267ccfefd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.065027] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1249.065027] env[61545]: value = "task-4256836" [ 1249.065027] env[61545]: _type = "Task" [ 1249.065027] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.074462] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256836, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.330532] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256835, 'name': ReconfigVM_Task, 'duration_secs': 0.288882} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.330532] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Reconfigured VM instance instance-0000006e to attach disk [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.331018] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee72dff-3b19-4741-a897-a08503d7792a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.337719] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1249.337719] env[61545]: value = "task-4256837" [ 1249.337719] env[61545]: _type = "Task" [ 1249.337719] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.346278] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256837, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.381033] env[61545]: DEBUG nova.compute.utils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.384429] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1249.386128] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1249.461129] env[61545]: DEBUG nova.policy [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1249.514029] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.514271] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.545280] env[61545]: INFO nova.compute.manager [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Detaching volume 44c6fe87-2763-45ca-bf43-c6ba0476f9ae [ 1249.575557] env[61545]: 
DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256836, 'name': Rename_Task, 'duration_secs': 0.135075} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.575885] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.576157] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04ab34db-84e2-456c-bd7f-77746c9d86f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.583811] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1249.583811] env[61545]: value = "task-4256838" [ 1249.583811] env[61545]: _type = "Task" [ 1249.583811] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.588602] env[61545]: INFO nova.virt.block_device [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Attempting to driver detach volume 44c6fe87-2763-45ca-bf43-c6ba0476f9ae from mountpoint /dev/sdb [ 1249.588914] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1249.589097] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838847', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'name': 'volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5393730d-6a4b-418a-9047-4287f87c8d14', 'attached_at': '', 'detached_at': '', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'serial': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1249.589894] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc5b9b0-3df7-488a-ae87-b20d351487cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.598161] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.617526] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa80d341-93cb-45c1-b260-e43ee3f970ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.625707] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dd0ed9-7b0e-48c9-ae74-2405ceabb87f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.648829] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070bd8c9-4920-4b8a-a44f-193bf9e8f350 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.664582] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] The volume has not been displaced from its original location: [datastore1] volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae/volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1249.669932] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1249.670275] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49b79a4e-9a5d-41f7-8c79-742ec8923c07 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.688114] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1249.688114] env[61545]: value = "task-4256839" [ 1249.688114] env[61545]: _type = "Task" [ 1249.688114] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.697166] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256839, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.783231] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Successfully created port: 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1249.850035] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256837, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.884948] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1250.016471] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1250.035294] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7236e1-ab26-4c61-9305-ec644b644988 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.043440] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59e6a3a-a8de-4be8-800f-e3c280e2e37a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.076978] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d73cbf-a1a6-4d85-bb88-0db44ebbc7ed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.088926] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ba6c32-d5eb-408b-a08e-32ab402fcdef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.106781] env[61545]: DEBUG oslo_vmware.api [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256838, 'name': PowerOnVM_Task, 'duration_secs': 0.503549} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.107590] env[61545]: DEBUG nova.compute.provider_tree [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.109345] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1250.199522] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256839, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.210925] env[61545]: DEBUG nova.compute.manager [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.211898] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50314438-195f-432a-ba99-dec04aa607cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.351702] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256837, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.540182] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.611048] env[61545]: DEBUG nova.scheduler.client.report [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.699912] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256839, 'name': ReconfigVM_Task, 'duration_secs': 0.979316} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.700223] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1250.704931] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a212f67e-ef13-4651-9bb8-632e2ef366d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.720770] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1250.720770] env[61545]: value = "task-4256840" [ 1250.720770] env[61545]: _type = "Task" [ 1250.720770] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.732759] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256840, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.733223] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4d6c4403-1587-4897-af76-d0f666e78244 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.329s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.734076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.682s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.734264] env[61545]: INFO nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] During sync_power_state the instance has a pending task (spawning). Skip. [ 1250.734437] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.851646] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256837, 'name': Rename_Task, 'duration_secs': 1.127781} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.852410] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.852410] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fcb7493-c472-4e56-ad4d-452bfa9594ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.860058] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1250.860058] env[61545]: value = "task-4256841" [ 1250.860058] env[61545]: _type = "Task" [ 1250.860058] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.869306] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256841, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.897458] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1250.924874] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.925174] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.925336] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.925516] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.925660] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.925805] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.926128] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.926278] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.926478] 
env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.926655] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.926830] env[61545]: DEBUG nova.virt.hardware [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.928184] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e268fc-c59b-4bce-9f10-85f8ed9fb7c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.935733] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3148e296-f1eb-4704-8127-7220c9600114 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.116374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.116927] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1251.119675] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.580s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.121133] env[61545]: INFO nova.compute.claims [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1251.232340] env[61545]: DEBUG oslo_vmware.api [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256840, 'name': ReconfigVM_Task, 'duration_secs': 0.146305} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.232676] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838847', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'name': 'volume-44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5393730d-6a4b-418a-9047-4287f87c8d14', 'attached_at': '', 'detached_at': '', 'volume_id': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae', 'serial': '44c6fe87-2763-45ca-bf43-c6ba0476f9ae'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1251.272432] env[61545]: DEBUG nova.compute.manager [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-vif-plugged-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1251.272769] env[61545]: DEBUG oslo_concurrency.lockutils [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.273207] env[61545]: DEBUG oslo_concurrency.lockutils [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.273349] env[61545]: DEBUG oslo_concurrency.lockutils [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.273607] env[61545]: DEBUG nova.compute.manager [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] No waiting events found dispatching network-vif-plugged-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1251.273869] env[61545]: WARNING nova.compute.manager [req-b72fbc6b-5159-45df-aa3d-e595730b9609 req-4815b0da-534c-4df9-925c-4dfe959badda service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received unexpected event network-vif-plugged-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 for instance with vm_state building and task_state spawning. 
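The Task entries above ("progress is N%.", "completed successfully.") come from a poll loop around vCenter tasks. Below is a minimal sketch of that polling pattern; the `get_task_info` callable, the attribute names (`id`, `state`, `progress`, `error`) and the poll interval are illustrative assumptions, not the oslo.vmware API.

```python
import logging
import time

LOG = logging.getLogger(__name__)


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it leaves the running state.

    get_task_info is assumed to return an object exposing id, state
    ('running', 'success' or 'error'), progress and error; this mirrors
    the "progress is N%" / "completed successfully" lines in the log,
    not the real oslo.vmware implementation.
    """
    start = time.time()
    while True:
        info = get_task_info()
        if info.state == "running":
            # Matches the periodic "Task: {...} progress is N%." records.
            LOG.debug("Task %s progress is %s%%.", info.id, info.progress)
            time.sleep(poll_interval)
            continue
        duration = time.time() - start
        if info.state == "success":
            # Matches the "... 'duration_secs': X} completed successfully." records.
            LOG.debug("Task %s completed successfully in %.3fs.",
                      info.id, duration)
            return info
        raise RuntimeError(
            f"Task {info.id} failed after {duration:.3f}s: {info.error}")
```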
[ 1251.370322] env[61545]: DEBUG oslo_vmware.api [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256841, 'name': PowerOnVM_Task, 'duration_secs': 0.474531} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.370745] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.370858] env[61545]: INFO nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Took 11.01 seconds to spawn the instance on the hypervisor. [ 1251.370984] env[61545]: DEBUG nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.371807] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17157ef6-43a9-46ee-a53c-f11b84170d37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.395482] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Successfully updated port: 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.626506] env[61545]: DEBUG nova.compute.utils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1251.630165] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1251.630345] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1251.721167] env[61545]: DEBUG nova.policy [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1251.782236] env[61545]: DEBUG nova.objects.instance [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'flavor' on Instance uuid 5393730d-6a4b-418a-9047-4287f87c8d14 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.891689] env[61545]: INFO nova.compute.manager [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Took 15.86 seconds to build instance. 
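The lockutils lines throughout this log record two durations per named lock: how long the caller waited to acquire it and how long it was then held (e.g. "compute_resources" held 2.242s, the instance lock held 20.329s above). The context manager below reproduces that bookkeeping purely as an illustration of the pattern, not the oslo.concurrency implementation; the in-process registry and the `by` label are assumptions.

```python
import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)

_locks = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name, by):
    """Log how long a named lock was waited for and then held."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.time()
    lock.acquire()
    acquired_at = time.time()
    # Mirrors: Lock "<name>" acquired by "<by>" :: waited Xs
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
              name, by, acquired_at - t0)
    try:
        yield
    finally:
        lock.release()
        # Mirrors: Lock "<name>" "released" by "<by>" :: held Ys
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, by, time.time() - acquired_at)
```

Used as `with timed_lock("compute_resources", by="ResourceTracker.instance_claim"): ...`, it emits the same waited/held pair seen in the records above.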
[ 1251.898233] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.898424] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.898526] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.126446] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Successfully created port: fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.134067] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1252.272036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33c64fe-3a84-4d0a-a8ed-43116d502bfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.281245] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6e7da9-4afe-4aa1-8507-dcf6c432496f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.317618] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbc9205-ff7c-4163-854b-fae3322702bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.326387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82727799-9a90-414a-ac51-fb7b5dfa49d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.343096] env[61545]: DEBUG nova.compute.provider_tree [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.395650] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dadcdd80-a581-4141-97ef-6e8f3d292c86 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.382s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.395986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.342s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.397009] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910bc6ef-24cb-4eb4-b074-bb53e2979674 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.430853] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1252.452676] env[61545]: INFO nova.compute.manager [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Rebuilding instance [ 1252.497610] env[61545]: DEBUG nova.compute.manager [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1252.499539] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23c8780-eeb5-493c-ae9c-369d3075fbcd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.593029] env[61545]: DEBUG nova.network.neutron [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.789202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ab29608c-fce2-4e8d-92e1-f68844db047e tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.747s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.846231] env[61545]: DEBUG nova.scheduler.client.report [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1252.909476] env[61545]: INFO nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] During sync_power_state the instance has a pending task (rebuilding). Skip. [ 1252.909706] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.514s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.096392] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.096836] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Instance network_info: |[{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1253.097376] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:ae:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.105428] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.105704] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1253.106391] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec46f30e-9a43-46f9-96eb-a5a6f0b5e069 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.127373] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.127373] env[61545]: value = "task-4256842" [ 1253.127373] env[61545]: _type = "Task" [ 1253.127373] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.136144] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256842, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.143035] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1253.171565] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1253.171810] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1253.171968] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1253.172176] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] 
Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1253.172322] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1253.172468] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1253.172679] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1253.172839] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1253.173046] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1253.173227] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1253.173401] env[61545]: DEBUG nova.virt.hardware [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1253.174278] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0da12a7-c145-4585-9249-b2d25db3f15b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.182649] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316b66c9-d64b-4e0f-9994-01a58c8975a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.302845] env[61545]: DEBUG nova.compute.manager [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1253.303012] env[61545]: DEBUG nova.compute.manager [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing 
instance network info cache due to event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1253.303282] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.303432] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.303608] env[61545]: DEBUG nova.network.neutron [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1253.352402] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.352935] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1253.449704] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.450052] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.450280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.450476] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.450642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.453169] env[61545]: INFO nova.compute.manager [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Terminating instance [ 1253.513070] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.513423] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfa96b5a-2437-45d6-a6d3-a776fe38b820 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.523218] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 
1253.523218] env[61545]: value = "task-4256843" [ 1253.523218] env[61545]: _type = "Task" [ 1253.523218] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.532968] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.639620] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256842, 'name': CreateVM_Task, 'duration_secs': 0.336492} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.639620] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1253.651197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.651197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.651197] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1253.651197] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61dd1de1-ca42-4485-b252-58cf8b93ceee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.655961] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1253.655961] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218eaa8-cced-f451-806b-3984312d41e8" [ 1253.655961] env[61545]: _type = "Task" [ 1253.655961] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.663109] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Successfully updated port: fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1253.669976] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218eaa8-cced-f451-806b-3984312d41e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.721661] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.857592] env[61545]: DEBUG nova.compute.utils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1253.859224] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1253.859428] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1253.913552] env[61545]: DEBUG nova.policy [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82de8ada56cd46319fe4c7ecd4957abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da39b1ee6df640b89a9dab58e3380397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1253.956915] env[61545]: DEBUG nova.compute.manager [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1253.957177] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.958088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5b9772-0073-4735-87c8-095e5689ce2a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.969420] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.969675] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56ba4ca8-4a28-49d7-a540-3df1da8636f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.977512] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1253.977512] env[61545]: value = "task-4256844" [ 1253.977512] env[61545]: _type = "Task" [ 1253.977512] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.987813] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256844, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.036623] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256843, 'name': PowerOffVM_Task, 'duration_secs': 0.203035} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.036969] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.037264] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1254.038127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf343c3-8102-45fb-b6c9-e4614505fbc1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.047057] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1254.047426] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07a572c8-d6ec-4074-b916-8db52ddb02ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.114265] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1254.114542] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1254.114728] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1254.115053] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e51f668-b229-4bc7-818a-67e8f022f281 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.122134] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1254.122134] env[61545]: value = "task-4256846" [ 1254.122134] env[61545]: _type = "Task" [ 1254.122134] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.131189] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.167288] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5218eaa8-cced-f451-806b-3984312d41e8, 'name': SearchDatastore_Task, 'duration_secs': 0.014941} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.167707] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.167991] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.168311] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.168499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.168793] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.169218] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7b1929d-8b23-4697-8622-603b0d7466ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.172026] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.172212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.172416] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1254.187037] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.189031] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.193674] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.193958] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1254.194823] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d433762d-ff37-4744-84db-19dfcd06b2b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.203467] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1254.203467] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a03482-5459-c291-5ffe-bd75df0a0766" [ 1254.203467] env[61545]: _type = "Task" [ 1254.203467] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.208527] env[61545]: DEBUG nova.network.neutron [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1254.209029] env[61545]: DEBUG nova.network.neutron [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.217086] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a03482-5459-c291-5ffe-bd75df0a0766, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.363504] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1254.436107] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Successfully created port: 30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1254.488402] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256844, 'name': PowerOffVM_Task, 'duration_secs': 0.243401} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.488749] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1254.488948] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1254.489133] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58103b05-e986-4de2-aad6-28da3b5d835e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.551678] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1254.552043] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1254.552304] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleting the datastore file [datastore2] 5393730d-6a4b-418a-9047-4287f87c8d14 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1254.552620] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7e6f1a9-2e8b-40e8-bded-c5eedd0ad110 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.560663] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for the task: (returnval){ [ 1254.560663] env[61545]: value = "task-4256848" [ 1254.560663] env[61545]: _type = "Task" [ 1254.560663] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.571316] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.632285] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.418898} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.632550] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.632732] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.632905] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.704287] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1254.712322] env[61545]: DEBUG oslo_concurrency.lockutils [req-3a31e606-033c-417c-855c-6119710e5fe8 req-44284292-a14f-43bd-9375-6725e7db93df service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.716599] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a03482-5459-c291-5ffe-bd75df0a0766, 'name': SearchDatastore_Task, 'duration_secs': 0.016069} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.717385] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b977ff0-dc0c-4383-b635-0e3486b55705 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.723676] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1254.723676] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52144510-1473-0ee3-0612-54b2410cccb2" [ 1254.723676] env[61545]: _type = "Task" [ 1254.723676] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.731692] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52144510-1473-0ee3-0612-54b2410cccb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.887278] env[61545]: DEBUG nova.network.neutron [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Updating instance_info_cache with network_info: [{"id": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "address": "fa:16:3e:46:40:1c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd769309-36", "ovs_interfaceid": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.071020] env[61545]: DEBUG oslo_vmware.api [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Task: {'id': task-4256848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347712} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.071020] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1255.071236] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1255.071236] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1255.071399] env[61545]: INFO nova.compute.manager [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1255.071639] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.071849] env[61545]: DEBUG nova.compute.manager [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1255.071950] env[61545]: DEBUG nova.network.neutron [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1255.236453] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52144510-1473-0ee3-0612-54b2410cccb2, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.236751] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.236978] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e24a6086-7dd1-4e75-b49e-dcc7c28eaea8/e24a6086-7dd1-4e75-b49e-dcc7c28eaea8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1255.237574] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4207700b-78bf-4fe2-911e-bbd1b0971310 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.245222] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1255.245222] env[61545]: value = "task-4256849" [ 1255.245222] env[61545]: _type = "Task" [ 1255.245222] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.254334] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.372768] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1255.390642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.390862] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Instance network_info: |[{"id": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "address": "fa:16:3e:46:40:1c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd769309-36", "ovs_interfaceid": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1255.391429] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:40:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd769309-365b-44c3-a088-5d1dc94d9e5a', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.404853] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.407829] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1255.408507] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a467f34a-5d0c-4e5f-a09d-a676e95b3120 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.428662] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1255.429114] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.429409] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1255.429766] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.430049] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1255.430353] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1255.430699] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1255.430980] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1255.431353] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1255.431802] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1255.432128] env[61545]: DEBUG nova.virt.hardware [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1255.433621] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e86ad72-71cb-4dac-a931-95152f6780ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.450065] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed572827-1513-4505-b83f-131f0fc0aa4b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.452295] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.452295] env[61545]: value = "task-4256850" [ 1255.452295] env[61545]: _type = "Task" [ 1255.452295] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.475316] env[61545]: DEBUG nova.compute.manager [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Received event network-vif-plugged-fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1255.475647] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Acquiring lock "f1243572-2768-4e0f-b3e7-26aa4554d987-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.475930] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.476301] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.476634] env[61545]: DEBUG nova.compute.manager [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] No waiting events found dispatching network-vif-plugged-fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1255.477267] env[61545]: WARNING nova.compute.manager [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Received unexpected event network-vif-plugged-fd769309-365b-44c3-a088-5d1dc94d9e5a for instance with vm_state building and task_state spawning. [ 1255.477364] env[61545]: DEBUG nova.compute.manager [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Received event network-changed-fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1255.477631] env[61545]: DEBUG nova.compute.manager [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Refreshing instance network info cache due to event network-changed-fd769309-365b-44c3-a088-5d1dc94d9e5a. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1255.478029] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Acquiring lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.478299] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Acquired lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.478652] env[61545]: DEBUG nova.network.neutron [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Refreshing network info cache for port fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1255.489040] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256850, 'name': CreateVM_Task} progress is 15%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.672393] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1255.672797] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.673063] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1255.673427] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.673621] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 
0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1255.673850] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1255.674225] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1255.674455] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1255.674709] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1255.674936] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1255.675224] env[61545]: DEBUG nova.virt.hardware [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1255.676554] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520607af-fd59-4a79-bfeb-aa709c97fb48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.688265] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01350252-c211-489a-bad8-3b7dd3eab5b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.705769] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:ef:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1df49c85-dd94-41c4-9c7d-d7df50f0288d', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.715645] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.716096] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1255.716466] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-578d95d2-fa86-4308-ae61-6c57f06825ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.740293] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.740293] env[61545]: value = "task-4256851" [ 1255.740293] env[61545]: _type = "Task" [ 1255.740293] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.753735] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256851, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.760555] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256849, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.970617] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256850, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.073030] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Successfully updated port: 30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1256.186703] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.213080] env[61545]: DEBUG nova.network.neutron [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Updated VIF entry in instance network info cache for port fd769309-365b-44c3-a088-5d1dc94d9e5a. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.213481] env[61545]: DEBUG nova.network.neutron [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Updating instance_info_cache with network_info: [{"id": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "address": "fa:16:3e:46:40:1c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd769309-36", "ovs_interfaceid": "fd769309-365b-44c3-a088-5d1dc94d9e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.256888] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256851, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.262448] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574984} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.262448] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e24a6086-7dd1-4e75-b49e-dcc7c28eaea8/e24a6086-7dd1-4e75-b49e-dcc7c28eaea8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.262448] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1256.262448] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b18ba2c-bda0-4cc9-a0f5-261970ef700b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.270850] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1256.270850] env[61545]: value = "task-4256852" [ 1256.270850] env[61545]: _type = "Task" [ 1256.270850] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.276280] env[61545]: DEBUG nova.network.neutron [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.281877] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.464062] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256850, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.578280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.578673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.578673] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1256.691228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.691228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.691228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.691228] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1256.692524] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9bf6da-a80b-4e7a-873d-c5bb8177de11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.701041] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb03442f-a20d-458d-8afe-38a1e8f2e5a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.716472] env[61545]: DEBUG oslo_concurrency.lockutils [req-1120bd5e-5d65-494e-9bba-935f49049393 req-1e87a01c-9ea1-40ca-93b2-2bbb4a6f15ba service nova] Releasing lock "refresh_cache-f1243572-2768-4e0f-b3e7-26aa4554d987" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.717607] env[61545]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa8558e-93c6-495d-b8b0-b55843718a0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.725944] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e635d05-0a8d-4369-b7da-194a6cccf45f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.757577] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179585MB free_disk=246GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1256.757793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.757975] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.769428] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256851, 'name': CreateVM_Task, 'duration_secs': 0.940766} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.769632] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1256.770392] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.770592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.770998] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1256.771372] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c389b622-45e4-4d1b-a23d-b6518f6031b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.778776] 
env[61545]: INFO nova.compute.manager [-] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Took 1.71 seconds to deallocate network for instance. [ 1256.784609] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081391} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.784943] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1256.784943] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e240b-62d9-cfda-9ae2-b0fc1cab8137" [ 1256.784943] env[61545]: _type = "Task" [ 1256.784943] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.787934] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.789179] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f66ad3-a5f3-4784-912c-eed922c832d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.801720] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e240b-62d9-cfda-9ae2-b0fc1cab8137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.822468] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] e24a6086-7dd1-4e75-b49e-dcc7c28eaea8/e24a6086-7dd1-4e75-b49e-dcc7c28eaea8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.824583] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84f821a4-c403-47ac-b816-d92c8ba0b367 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.846745] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1256.846745] env[61545]: value = "task-4256853" [ 1256.846745] env[61545]: _type = "Task" [ 1256.846745] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.855775] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256853, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.965135] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256850, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.113945] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1257.272664] env[61545]: DEBUG nova.network.neutron [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updating instance_info_cache with network_info: [{"id": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "address": "fa:16:3e:ac:7c:6b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b017cf-ed", "ovs_interfaceid": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.289529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.299683] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525e240b-62d9-cfda-9ae2-b0fc1cab8137, 'name': SearchDatastore_Task, 'duration_secs': 0.038188} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.299979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.300236] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1257.300477] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.300626] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.300834] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1257.301121] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2588c08c-6da6-4d80-bb7a-4229e02da1c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.309988] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1257.310187] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1257.310920] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f40e114-6b4d-4b8b-9dc6-83fb5219e60d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.316088] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1257.316088] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e24dc1-9e26-6e88-30ab-b8194dc0382b" [ 1257.316088] env[61545]: _type = "Task" [ 1257.316088] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.323989] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e24dc1-9e26-6e88-30ab-b8194dc0382b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.356759] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256853, 'name': ReconfigVM_Task, 'duration_secs': 0.338748} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.357057] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfigured VM instance instance-0000006f to attach disk [datastore2] e24a6086-7dd1-4e75-b49e-dcc7c28eaea8/e24a6086-7dd1-4e75-b49e-dcc7c28eaea8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.358088] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7648a011-0885-47e4-90e3-9b16d853e5a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.365543] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1257.365543] env[61545]: value = "task-4256854" [ 1257.365543] env[61545]: _type = "Task" [ 1257.365543] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.374213] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256854, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.466779] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256850, 'name': CreateVM_Task, 'duration_secs': 1.731752} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.467182] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1257.469127] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.469334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.469680] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1257.470017] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ffd3a4-79d0-4efc-b36a-be09da4087d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.476609] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1257.476609] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd80a5-b1f2-d18d-6aec-5084dfa35331" [ 1257.476609] env[61545]: _type = "Task" [ 1257.476609] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.486400] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd80a5-b1f2-d18d-6aec-5084dfa35331, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.525031] env[61545]: DEBUG nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 5393730d-6a4b-418a-9047-4287f87c8d14] Received event network-vif-deleted-ce048a59-c941-4a83-bbf9-29dfc46aae60 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1257.525246] env[61545]: DEBUG nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Received event network-vif-plugged-30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1257.525440] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Acquiring lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.525647] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.525813] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.525979] env[61545]: DEBUG nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] No waiting events found dispatching network-vif-plugged-30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1257.526159] env[61545]: WARNING nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Received unexpected event network-vif-plugged-30b017cf-ed19-4ecc-a917-33a9eff869e2 for instance with vm_state building and task_state spawning. [ 1257.526320] env[61545]: DEBUG nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Received event network-changed-30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1257.526469] env[61545]: DEBUG nova.compute.manager [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Refreshing instance network info cache due to event network-changed-30b017cf-ed19-4ecc-a917-33a9eff869e2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1257.526627] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Acquiring lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.775413] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.775904] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Instance network_info: |[{"id": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "address": "fa:16:3e:ac:7c:6b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b017cf-ed", "ovs_interfaceid": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1257.776350] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Acquired lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.776597] env[61545]: DEBUG nova.network.neutron [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Refreshing network info cache for port 30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1257.778199] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:7c:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'30b017cf-ed19-4ecc-a917-33a9eff869e2', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1257.786534] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1257.787894] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1257.788154] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef4e885b-16e3-4d29-b39d-0a298827a86b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.803805] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804038] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 5393730d-6a4b-418a-9047-4287f87c8d14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804168] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9a59f45b-727f-45ea-ad33-64fa23aaffe7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804265] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance fde00c6e-29b5-4b99-944a-c0404e4f2fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804358] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 40bade64-b16b-4a33-a9ea-18f80a32c6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804462] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804578] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c7b7f132-d863-4271-94be-a3c0aaed43fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804689] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804774] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance f1243572-2768-4e0f-b3e7-26aa4554d987 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.804829] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 99c9b4ab-efcf-4e13-bd92-c634972fe082 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.805012] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1257.805144] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=250GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1257.814914] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1257.814914] env[61545]: value = "task-4256855" [ 1257.814914] env[61545]: _type = "Task" [ 1257.814914] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.832217] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256855, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.837816] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e24dc1-9e26-6e88-30ab-b8194dc0382b, 'name': SearchDatastore_Task, 'duration_secs': 0.019243} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.838687] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71187480-34c2-4d94-ac35-11cc1b1e0152 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.848757] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1257.848757] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abba8d-fcfb-e94c-5eaf-d5fb4e788432" [ 1257.848757] env[61545]: _type = "Task" [ 1257.848757] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.860216] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abba8d-fcfb-e94c-5eaf-d5fb4e788432, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.879084] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256854, 'name': Rename_Task, 'duration_secs': 0.148926} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.879382] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.879636] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6dd48b7-c6eb-401f-b63f-158f39e22b9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.888984] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1257.888984] env[61545]: value = "task-4256856" [ 1257.888984] env[61545]: _type = "Task" [ 1257.888984] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.897697] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.958642] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f5a1dd-4f9c-44bf-b81b-6c67592385ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.967134] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95305a8-6e6a-4055-9672-610945745eff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.002889] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42bce16-65aa-4ece-8953-fed48c5397c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.013510] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52fd80a5-b1f2-d18d-6aec-5084dfa35331, 'name': SearchDatastore_Task, 'duration_secs': 0.014199} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.013942] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.014222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1258.014466] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.015767] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffd8f32-536e-412a-b4e1-4eb45f8d59bd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.032616] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.327405] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256855, 'name': CreateVM_Task, 'duration_secs': 0.378091} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.327614] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1258.328306] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.328471] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.328802] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1258.329104] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73d9d1a9-240f-4938-ae4d-85c5e2f37f26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.337059] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1258.337059] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52621dcc-1b06-e631-0bfa-f4986980e27b" [ 1258.337059] env[61545]: _type = "Task" [ 1258.337059] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.346116] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52621dcc-1b06-e631-0bfa-f4986980e27b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.361092] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52abba8d-fcfb-e94c-5eaf-d5fb4e788432, 'name': SearchDatastore_Task, 'duration_secs': 0.013282} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.361367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.361622] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1258.361905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.362111] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1258.362347] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd98ef55-9c50-4a9b-9fb4-cbc61e47997f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.364368] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b342c5e4-1077-4271-937f-b95b9f8116a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.372345] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1258.372345] env[61545]: value = "task-4256857" [ 1258.372345] env[61545]: _type = "Task" [ 1258.372345] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.376946] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1258.377164] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1258.380584] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5097f461-1728-43cd-b78d-c7a9c68895cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.387204] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.391133] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1258.391133] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52461e41-1a5f-55b8-798d-e1be646a4e60" [ 1258.391133] env[61545]: _type = "Task" [ 1258.391133] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.404611] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52461e41-1a5f-55b8-798d-e1be646a4e60, 'name': SearchDatastore_Task} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.408661] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256856, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.408928] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a3851b-7b93-4310-b06e-2977ae356cd4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.417523] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1258.417523] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a7ca9f-2e92-e33f-3df8-247dab41d0e4" [ 1258.417523] env[61545]: _type = "Task" [ 1258.417523] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.427143] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a7ca9f-2e92-e33f-3df8-247dab41d0e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.524371] env[61545]: DEBUG nova.network.neutron [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updated VIF entry in instance network info cache for port 30b017cf-ed19-4ecc-a917-33a9eff869e2. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1258.524807] env[61545]: DEBUG nova.network.neutron [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updating instance_info_cache with network_info: [{"id": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "address": "fa:16:3e:ac:7c:6b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b017cf-ed", "ovs_interfaceid": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.536075] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1258.854103] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52621dcc-1b06-e631-0bfa-f4986980e27b, 'name': SearchDatastore_Task, 'duration_secs': 0.011504} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.854103] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.854518] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1258.854518] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.883945] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256857, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.901218] env[61545]: DEBUG oslo_vmware.api [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256856, 'name': PowerOnVM_Task, 'duration_secs': 0.654662} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.901499] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.901711] env[61545]: INFO nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Took 8.00 seconds to spawn the instance on the hypervisor. 
[ 1258.901908] env[61545]: DEBUG nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.902745] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275af56d-e42a-416c-a5f7-eb2f0f88afb2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.928724] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a7ca9f-2e92-e33f-3df8-247dab41d0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.01262} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.929064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.929335] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f1243572-2768-4e0f-b3e7-26aa4554d987/f1243572-2768-4e0f-b3e7-26aa4554d987.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1258.929633] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.929825] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1258.930081] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c893544-4f7c-4b24-9a7b-b134da7ee819 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.932494] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c061d20-3e19-4838-816b-391faa1dbefe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.942353] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: 
(returnval){ [ 1258.942353] env[61545]: value = "task-4256858" [ 1258.942353] env[61545]: _type = "Task" [ 1258.942353] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.946867] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1258.947098] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1258.948292] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e8b169-3c25-4a7c-a95b-16053476fab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.954773] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256858, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.958115] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1258.958115] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2f4c5-5d9f-a76e-d92d-dcf4348800ae" [ 1258.958115] env[61545]: _type = "Task" [ 1258.958115] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.966640] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2f4c5-5d9f-a76e-d92d-dcf4348800ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.028488] env[61545]: DEBUG oslo_concurrency.lockutils [req-a83e18ba-8cf4-4d63-b24d-828595b2e414 req-1753bd3b-81c2-4335-bc16-a735fdaf6aca service nova] Releasing lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.041616] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1259.041860] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.284s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.042144] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.753s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.042395] env[61545]: DEBUG nova.objects.instance [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lazy-loading 'resources' on Instance uuid 5393730d-6a4b-418a-9047-4287f87c8d14 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.383475] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558741} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.383808] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1259.384065] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1259.384400] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eea128a3-5db2-4cee-b329-ce13fccf117a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.392335] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1259.392335] env[61545]: value = "task-4256859" [ 1259.392335] env[61545]: _type = "Task" [ 1259.392335] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.400930] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256859, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.421892] env[61545]: INFO nova.compute.manager [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Took 14.70 seconds to build instance. [ 1259.453223] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501637} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.453523] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] f1243572-2768-4e0f-b3e7-26aa4554d987/f1243572-2768-4e0f-b3e7-26aa4554d987.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1259.453740] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1259.453999] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551c4f7b-c5bd-45ce-9896-66ae4f3661e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.463143] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1259.463143] env[61545]: value = "task-4256860" [ 1259.463143] env[61545]: _type = "Task" [ 1259.463143] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.471153] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2f4c5-5d9f-a76e-d92d-dcf4348800ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011939} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.472082] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ffe278e-8c69-407d-ae7a-2d0637df7964 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.477401] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.481242] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1259.481242] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282fbfd-65a0-c098-1e44-f41c299e4b75" [ 1259.481242] env[61545]: _type = "Task" [ 1259.481242] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.489407] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282fbfd-65a0-c098-1e44-f41c299e4b75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.688381] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66dc4162-dc30-41e5-918b-c9ff2466a948 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.697419] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516e443a-64db-4d2c-9d64-1d20a3ee881c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.730325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6327552-ec16-44e8-8083-775998310845 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.738324] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe53377e-d2ef-4deb-a61c-4f2897e1ba1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.752015] env[61545]: DEBUG nova.compute.provider_tree [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1259.902648] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238398} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.903013] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.903738] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88859046-057b-4b03-a632-63f6a208c09d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.925813] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.926253] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a7b29daf-d422-4cac-9c1e-43a4433a7b66 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.210s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.926461] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2102083d-5845-4de6-9cbf-485ace94ab04 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.945626] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1259.945626] env[61545]: value = "task-4256861" [ 1259.945626] env[61545]: _type = "Task" [ 1259.945626] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.953658] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256861, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.972075] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253492} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.972360] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.973194] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f837d2-ea0a-4826-b425-66e54f8c8a95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.995709] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] f1243572-2768-4e0f-b3e7-26aa4554d987/f1243572-2768-4e0f-b3e7-26aa4554d987.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.998996] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21f52116-ade3-40fb-b24d-d0c36738e05b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.019301] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5282fbfd-65a0-c098-1e44-f41c299e4b75, 'name': SearchDatastore_Task, 'duration_secs': 0.026831} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.021078] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.021530] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 99c9b4ab-efcf-4e13-bd92-c634972fe082/99c9b4ab-efcf-4e13-bd92-c634972fe082.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1260.021739] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1260.021739] env[61545]: value = "task-4256862" [ 1260.021739] env[61545]: _type = "Task" [ 1260.021739] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.021985] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a55ae9fa-446d-4896-8431-4976c6a83ab1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.032621] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256862, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.034159] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1260.034159] env[61545]: value = "task-4256863" [ 1260.034159] env[61545]: _type = "Task" [ 1260.034159] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.042165] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256863, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.280042] env[61545]: ERROR nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] [req-19047ccb-0349-4349-96ae-f94394c8e73f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-19047ccb-0349-4349-96ae-f94394c8e73f"}]} [ 1260.298542] env[61545]: DEBUG nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1260.317500] env[61545]: DEBUG nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1260.317878] env[61545]: DEBUG nova.compute.provider_tree [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1260.335958] env[61545]: DEBUG nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1260.358339] env[61545]: DEBUG nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1260.466370] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256861, 'name': ReconfigVM_Task, 'duration_secs': 0.393443} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.469396] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Reconfigured VM instance instance-0000006e to attach disk [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa/c7b7f132-d863-4271-94be-a3c0aaed43fa.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1260.470373] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9779c76-c774-4264-921a-94310a74386a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.481149] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1260.481149] env[61545]: value = "task-4256864" [ 1260.481149] env[61545]: _type = "Task" [ 1260.481149] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.489559] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256864, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.536775] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256862, 'name': ReconfigVM_Task, 'duration_secs': 0.332224} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.541026] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Reconfigured VM instance instance-00000070 to attach disk [datastore2] f1243572-2768-4e0f-b3e7-26aa4554d987/f1243572-2768-4e0f-b3e7-26aa4554d987.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1260.542720] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc38b2cc-9f9e-48a0-a6dc-9d1dbf8dd22c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.545912] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2db06f8-0b39-4cbc-ae72-513eaf0a24da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.555596] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256863, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.559419] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1260.559419] env[61545]: value = "task-4256865" [ 1260.559419] env[61545]: _type = "Task" [ 1260.559419] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.560530] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795f1629-8dad-42f8-996d-87d5dac22d1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.578180] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256865, 'name': Rename_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.607529] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a936caad-3052-4a6d-a3c8-129ebd0f42ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.617696] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8c44ee-8c12-469b-b6ec-23df6bb16791 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.634831] env[61545]: DEBUG nova.compute.provider_tree [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1260.800428] env[61545]: DEBUG nova.compute.manager [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1260.800651] env[61545]: DEBUG nova.compute.manager [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing instance network info cache due to event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1260.803202] env[61545]: DEBUG oslo_concurrency.lockutils [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.804195] env[61545]: DEBUG oslo_concurrency.lockutils [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.804195] env[61545]: DEBUG nova.network.neutron [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1260.990057] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256864, 'name': Rename_Task, 'duration_secs': 0.372567} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.990645] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1260.990746] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52e3db77-ebd3-4781-b46b-b0f89c8f7b8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.998256] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1260.998256] env[61545]: value = "task-4256866" [ 1260.998256] env[61545]: _type = "Task" [ 1260.998256] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.006933] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256866, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.039486] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.039701] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.040345] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.040520] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.040672] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1261.045820] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256863, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664255} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.045820] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 99c9b4ab-efcf-4e13-bd92-c634972fe082/99c9b4ab-efcf-4e13-bd92-c634972fe082.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1261.045928] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1261.046222] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd8738a7-19b1-4eb2-a27b-4ac9e634b02e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.054282] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1261.054282] env[61545]: value = "task-4256867" [ 1261.054282] env[61545]: _type = "Task" [ 1261.054282] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.067191] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.079980] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256865, 'name': Rename_Task, 'duration_secs': 0.294039} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.079980] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1261.079980] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ea9ca3f-4dcc-43ed-8327-86f198755af0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.089045] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1261.089045] env[61545]: value = "task-4256868" [ 1261.089045] env[61545]: _type = "Task" [ 1261.089045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.102072] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256868, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.175022] env[61545]: DEBUG nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 162 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1261.175329] env[61545]: DEBUG nova.compute.provider_tree [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 162 to 163 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1261.175520] env[61545]: DEBUG nova.compute.provider_tree [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1261.515157] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256866, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.541996] env[61545]: DEBUG nova.network.neutron [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1261.542438] env[61545]: DEBUG nova.network.neutron [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.566145] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074563} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.566594] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1261.567824] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31778b77-ed55-40ff-9077-9e754195944d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.605815] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 99c9b4ab-efcf-4e13-bd92-c634972fe082/99c9b4ab-efcf-4e13-bd92-c634972fe082.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1261.611579] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95bf3883-bd9e-4871-8f93-d1086eba7117 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.645078] env[61545]: DEBUG oslo_vmware.api [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256868, 'name': PowerOnVM_Task, 'duration_secs': 0.529511} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.647131] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1261.647474] env[61545]: INFO nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1261.647760] env[61545]: DEBUG nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1261.648267] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1261.648267] env[61545]: value = "task-4256869" [ 1261.648267] env[61545]: _type = "Task" [ 1261.648267] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.649394] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d7348e-e89e-408d-bd53-820caf0d6f4e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.425018] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.383s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.427166] env[61545]: DEBUG oslo_concurrency.lockutils [req-65934cab-1513-4295-94b8-c6b0be2ae889 req-92b51d9c-b499-4eb9-8a38-c68890f3f7dd service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.437876] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.438185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.447708] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.447905] env[61545]: WARNING oslo_vmware.common.loopingcall [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] task run outlasted interval by 0.294763 sec [ 1262.455987] env[61545]: DEBUG oslo_vmware.api [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256866, 'name': PowerOnVM_Task, 'duration_secs': 0.524601} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.456987] env[61545]: INFO nova.scheduler.client.report [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Deleted allocations for instance 5393730d-6a4b-418a-9047-4287f87c8d14 [ 1262.461045] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1262.461253] env[61545]: DEBUG nova.compute.manager [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1262.462945] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd60a907-a50c-4880-9737-b88fdc1cb1e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.469619] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.933165] env[61545]: DEBUG oslo_concurrency.lockutils [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.933593] env[61545]: DEBUG oslo_concurrency.lockutils [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.933818] env[61545]: DEBUG nova.compute.manager [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1262.935089] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fb99b9-3c86-430a-bdca-bded31c5d25f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.941938] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1262.944625] env[61545]: DEBUG nova.compute.manager [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1262.945207] env[61545]: DEBUG nova.objects.instance [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'flavor' on Instance uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.959181] env[61545]: INFO nova.compute.manager [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Took 15.41 seconds to build instance. [ 1262.967775] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256869, 'name': ReconfigVM_Task, 'duration_secs': 0.98347} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.968758] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bd3908ab-f505-4f77-a98e-9620d56cc29d tempest-AttachVolumeNegativeTest-966914432 tempest-AttachVolumeNegativeTest-966914432-project-member] Lock "5393730d-6a4b-418a-9047-4287f87c8d14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.519s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.969879] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 99c9b4ab-efcf-4e13-bd92-c634972fe082/99c9b4ab-efcf-4e13-bd92-c634972fe082.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1262.970782] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2c0642b-4b21-4492-bab5-8308bb45ebd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.983767] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1262.983767] env[61545]: value = "task-4256870" [ 1262.983767] env[61545]: _type = "Task" [ 1262.983767] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.986224] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.986224] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.986379] env[61545]: DEBUG nova.objects.instance [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1263.000060] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256870, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.461667] env[61545]: DEBUG oslo_concurrency.lockutils [None req-291d864b-1405-486c-8808-17f7415077c3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.928s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.470834] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.500276] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256870, 'name': Rename_Task, 'duration_secs': 0.305391} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.500948] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1263.501223] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-565cb760-7067-44be-a979-39e13132ebd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.509779] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1263.509779] env[61545]: value = "task-4256872" [ 1263.509779] env[61545]: _type = "Task" [ 1263.509779] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.520179] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.762252] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "f1243572-2768-4e0f-b3e7-26aa4554d987" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.762933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.762933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "f1243572-2768-4e0f-b3e7-26aa4554d987-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.763222] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.763631] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock 
"f1243572-2768-4e0f-b3e7-26aa4554d987-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.767505] env[61545]: INFO nova.compute.manager [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Terminating instance [ 1263.957460] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1263.957825] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ca69805-9d35-4186-9f01-cb3b322cd9af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.965612] env[61545]: DEBUG oslo_vmware.api [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1263.965612] env[61545]: value = "task-4256873" [ 1263.965612] env[61545]: _type = "Task" [ 1263.965612] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.976440] env[61545]: DEBUG oslo_vmware.api [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.002163] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a6be1930-dfb9-4ae8-accd-07b628178e05 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.003515] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.533s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.005766] env[61545]: INFO nova.compute.claims [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1264.021036] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256872, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.215634] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.215986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.216274] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.216514] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.216709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.219402] env[61545]: INFO nova.compute.manager [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Terminating instance [ 1264.272303] env[61545]: DEBUG nova.compute.manager [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1264.272579] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.273708] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f7a5f8-5e46-4587-a20f-b37ef8b6563d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.283915] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.284346] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b33344ab-fd47-40e6-a9f4-8b6fd11652ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.292465] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1264.292465] env[61545]: value = "task-4256874" [ 1264.292465] env[61545]: _type = "Task" [ 1264.292465] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.305348] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.476214] env[61545]: DEBUG oslo_vmware.api [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256873, 'name': PowerOffVM_Task, 'duration_secs': 0.347264} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.476517] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.476718] env[61545]: DEBUG nova.compute.manager [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1264.477527] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886d2c9e-643f-4a8e-9487-aba679700fc8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.521726] env[61545]: DEBUG oslo_vmware.api [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256872, 'name': PowerOnVM_Task, 'duration_secs': 0.658619} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.522948] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1264.523170] env[61545]: INFO nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1264.523383] env[61545]: DEBUG nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1264.524187] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1517d09d-a7b5-40a3-a89c-306f2fe4b52c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.724310] env[61545]: DEBUG nova.compute.manager [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1264.724568] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.725566] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa94253f-1a74-4c33-8a05-524c7db2683a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.733666] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.733925] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.738935] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.739121] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bb64993-9b63-4433-b988-54c54eb3be19 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.746713] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1264.746713] env[61545]: value = "task-4256875" [ 1264.746713] env[61545]: _type = "Task" [ 1264.746713] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.757978] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256875, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.802823] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256874, 'name': PowerOffVM_Task, 'duration_secs': 0.196134} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.803156] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.803353] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.803617] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-501ac353-18c9-455b-a9c6-f910777d4b80 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.883894] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1264.884170] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1264.884342] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] f1243572-2768-4e0f-b3e7-26aa4554d987 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.884640] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd2a897f-ef3a-4491-beb4-59cefa208bff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.892580] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1264.892580] env[61545]: value = "task-4256877" [ 1264.892580] env[61545]: _type = "Task" [ 1264.892580] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.901845] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256877, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.994076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-679c92ee-3115-4e83-b292-8cea5b067785 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.046347] env[61545]: INFO nova.compute.manager [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Took 14.52 seconds to build instance. [ 1265.176638] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6bb460-cd65-437f-9439-46d461efb654 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.185353] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3024d74-14f6-4656-a044-7ae18417b4b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.219023] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31bd455-37e6-4076-a735-cdab54a76c5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.227310] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f52d6a-9141-49d4-80a1-26cf3e903be2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.241599] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1265.244544] env[61545]: DEBUG nova.compute.provider_tree [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.256688] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256875, 'name': PowerOffVM_Task, 'duration_secs': 0.262054} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.256938] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.257121] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1265.257389] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0df820fa-cfbb-47ab-8feb-8117a38ea001 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.341615] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1265.341826] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1265.342129] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] c7b7f132-d863-4271-94be-a3c0aaed43fa {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1265.342408] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3c44c2a-96b4-4ecb-926c-a5bd56175ac4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.350343] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1265.350343] env[61545]: value = "task-4256879" [ 1265.350343] env[61545]: _type = "Task" [ 1265.350343] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.358855] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.403273] env[61545]: DEBUG oslo_vmware.api [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219944} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.403552] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.403843] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.404082] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.404302] env[61545]: INFO nova.compute.manager [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1265.404606] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1265.404832] env[61545]: DEBUG nova.compute.manager [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1265.404915] env[61545]: DEBUG nova.network.neutron [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1265.548097] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a45bfe7b-4018-4008-b722-b9472789b02b tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.034s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.750215] env[61545]: DEBUG nova.scheduler.client.report [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1265.775290] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.863945] env[61545]: DEBUG oslo_vmware.api [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.474637} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.863945] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.863945] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.863945] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.863945] env[61545]: INFO nova.compute.manager [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1265.864424] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1265.864683] env[61545]: DEBUG nova.compute.manager [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1265.864801] env[61545]: DEBUG nova.network.neutron [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1266.227797] env[61545]: DEBUG nova.compute.manager [req-cf150ff9-f5ba-4735-b223-6ea29f5ee63e req-a7de4a0b-ad6b-4a54-8711-cf0953b89b7c service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Received event network-vif-deleted-fd769309-365b-44c3-a088-5d1dc94d9e5a {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1266.228119] env[61545]: INFO nova.compute.manager [req-cf150ff9-f5ba-4735-b223-6ea29f5ee63e req-a7de4a0b-ad6b-4a54-8711-cf0953b89b7c service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Neutron deleted interface fd769309-365b-44c3-a088-5d1dc94d9e5a; detaching it from the instance and deleting it from the info cache [ 1266.228369] env[61545]: DEBUG nova.network.neutron [req-cf150ff9-f5ba-4735-b223-6ea29f5ee63e req-a7de4a0b-ad6b-4a54-8711-cf0953b89b7c service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.263064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.263064] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1266.263064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.487s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.263851] env[61545]: INFO nova.compute.claims [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.282140] env[61545]: DEBUG nova.network.neutron [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.697314] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.697314] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.697314] env[61545]: INFO nova.compute.manager [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Shelving [ 1266.731479] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81c639e5-e43b-428d-9c46-8aa0280f201a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.743062] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47fb961-6e79-41ce-a338-d723a264447e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.773656] env[61545]: DEBUG nova.compute.utils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1266.787778] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1266.788162] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1266.792867] env[61545]: INFO nova.compute.manager [-] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Took 1.39 seconds to deallocate network for instance. [ 1266.793504] env[61545]: DEBUG nova.compute.manager [req-cf150ff9-f5ba-4735-b223-6ea29f5ee63e req-a7de4a0b-ad6b-4a54-8711-cf0953b89b7c service nova] [instance: f1243572-2768-4e0f-b3e7-26aa4554d987] Detach interface failed, port_id=fd769309-365b-44c3-a088-5d1dc94d9e5a, reason: Instance f1243572-2768-4e0f-b3e7-26aa4554d987 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1266.795871] env[61545]: DEBUG nova.objects.instance [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'flavor' on Instance uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.799897] env[61545]: DEBUG nova.network.neutron [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.862143] env[61545]: DEBUG nova.policy [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1267.224175] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Successfully created port: 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1267.296234] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1267.302457] env[61545]: INFO nova.compute.manager [-] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Took 1.44 seconds to deallocate network for instance. 
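The entries above and below trace a pattern that repeats throughout this log: the VMware driver takes an oslo_concurrency lock (the "Acquiring lock ... / Lock ... acquired ... / Lock ... released" triples), starts a vSphere task such as PowerOnVM_Task, PowerOffVM_Task, UnregisterVM or DeleteDatastoreFile_Task through the oslo.vmware session ("Invoking VirtualMachine....") and then polls it with wait_for_task until completion (the "Task: {...} progress is N%" and "completed successfully" lines). The snippet below is a minimal, standalone sketch of that pattern using oslo.concurrency and oslo.vmware directly; it is not Nova's actual nova.virt.vmwareapi code path, and the vCenter host, credentials and VM reference are placeholder assumptions.

    # Sketch only: illustrates the lock -> invoke task -> wait_for_task sequence
    # visible in the log. Requires a reachable vCenter; host, credentials and
    # vm_ref are placeholders, not values taken from this deployment.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api

    def power_on_vm(session, vm_ref):
        # Serialize against other operations on the same resource, mirroring the
        # "Acquiring lock ... / Lock ... acquired" pairs in the log above.
        with lockutils.lock('compute_resources'):
            # Start the vSphere task (logged as "Invoking VirtualMachine.PowerOnVM_Task").
            task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
            # Block until the task reaches a terminal state; while polling,
            # oslo.vmware emits the "Task: {...} progress is N%" debug lines.
            session.wait_for_task(task_ref)

    # Example wiring (assumption: a reachable vCenter endpoint). Constructing the
    # session performs the SessionManager.Login seen near the top of this log.
    # session = vmware_api.VMwareAPISession(
    #     'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    #     api_retry_count=10, task_poll_interval=0.5)
    # power_on_vm(session, vm_ref)  # vm_ref obtained via the property collector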
[ 1267.304372] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.304536] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.304704] env[61545]: DEBUG nova.network.neutron [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1267.304879] env[61545]: DEBUG nova.objects.instance [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'info_cache' on Instance uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1267.306585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.478586] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e83e6-14aa-418b-a4f3-7d63a4fe71f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.488104] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a89a330-641f-4970-a4a1-3a04be6901ce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.520312] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad47e25b-47e4-4dfa-a46d-cdab88c134b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.529987] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb53753b-b61e-4a3d-b536-c1457f6b4ff0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.546119] env[61545]: DEBUG nova.compute.provider_tree [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.707865] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1267.708277] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8f043b-5159-43e9-99e9-53f3c7d6a745 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.717386] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1267.717386] env[61545]: value = "task-4256882" [ 1267.717386] env[61545]: _type = "Task" [ 1267.717386] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.729609] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.809154] env[61545]: DEBUG nova.objects.base [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1267.810986] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.050440] env[61545]: DEBUG nova.scheduler.client.report [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.227955] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256882, 'name': PowerOffVM_Task, 'duration_secs': 0.250681} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.228275] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1268.229139] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f16a7b-396b-475d-9274-583d8732a48b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.249369] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac197b13-1378-4cf3-aeef-3d98eb1094fc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.261969] env[61545]: DEBUG nova.compute.manager [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: c7b7f132-d863-4271-94be-a3c0aaed43fa] Received event network-vif-deleted-1df49c85-dd94-41c4-9c7d-d7df50f0288d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1268.262234] env[61545]: DEBUG nova.compute.manager [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Received event network-changed-30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1268.262404] env[61545]: DEBUG nova.compute.manager [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Refreshing instance network info cache due to event network-changed-30b017cf-ed19-4ecc-a917-33a9eff869e2. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1268.262640] env[61545]: DEBUG oslo_concurrency.lockutils [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] Acquiring lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.262749] env[61545]: DEBUG oslo_concurrency.lockutils [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] Acquired lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.262910] env[61545]: DEBUG nova.network.neutron [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Refreshing network info cache for port 30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1268.307988] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1268.343797] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1268.344315] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1268.344616] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1268.344916] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1268.346252] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1268.346252] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1268.346252] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1268.346252] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1268.346252] 
env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1268.346252] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1268.348403] env[61545]: DEBUG nova.virt.hardware [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1268.348403] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6230ff-dc41-402b-ba09-db3f291be26e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.356110] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff7e4c4-8831-414b-bf85-e8b6507db486 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.556133] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.556309] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1268.559275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.253s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.559458] env[61545]: DEBUG nova.objects.instance [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid f1243572-2768-4e0f-b3e7-26aa4554d987 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.614178] env[61545]: DEBUG nova.network.neutron [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [{"id": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "address": "fa:16:3e:b0:54:e7", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd4ffa1-26", "ovs_interfaceid": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.760802] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Creating Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1268.761229] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f60c171c-358f-4892-9a2e-f47bc3f10812 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.769097] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1268.769097] env[61545]: value = "task-4256883" [ 1268.769097] env[61545]: _type = "Task" [ 1268.769097] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.778433] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256883, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.062484] env[61545]: DEBUG nova.compute.utils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1269.066659] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1269.066799] env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1269.116995] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.136448] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Successfully updated port: 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1269.161669] env[61545]: DEBUG nova.policy [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4333754ae4a4e26bab98dfe1853e667', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b64f16b672ff471ba1d48aa2490b9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1269.209952] env[61545]: DEBUG nova.network.neutron [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updated VIF entry in instance network info cache for port 30b017cf-ed19-4ecc-a917-33a9eff869e2. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1269.210499] env[61545]: DEBUG nova.network.neutron [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updating instance_info_cache with network_info: [{"id": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "address": "fa:16:3e:ac:7c:6b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30b017cf-ed", "ovs_interfaceid": "30b017cf-ed19-4ecc-a917-33a9eff869e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.253354] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1d0400-b7a6-4517-8f1e-2dfe9f413b8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.261868] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e68b8a-89b4-4ae6-a113-76f816f4c97e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.298288] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3efd07-9839-4d58-bd7c-a68b0c0fdeeb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.308701] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256883, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.310037] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfd7b2a-0bd3-418a-b95d-04a4fa00f685 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.324885] env[61545]: DEBUG nova.compute.provider_tree [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.482140] env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Successfully created port: cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1269.567147] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1269.641809] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.642295] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.642295] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1269.714049] env[61545]: DEBUG oslo_concurrency.lockutils [req-9e271f88-6061-4765-ae9a-bb60e8df4459 req-4b098d0c-ab09-4cf3-a7e5-1e4c1572f718 service nova] Releasing lock "refresh_cache-99c9b4ab-efcf-4e13-bd92-c634972fe082" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.805394] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256883, 'name': CreateSnapshot_Task, 'duration_secs': 0.63487} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.805845] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Created Snapshot of the VM instance {{(pid=61545) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1269.807354] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1334cac5-c2cd-40e3-9a24-f896e84f3b6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.830877] env[61545]: DEBUG nova.scheduler.client.report [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.124645] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1270.124965] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-befe1f54-d84e-4b2c-baf9-671a2b625260 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.133416] env[61545]: DEBUG oslo_vmware.api [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1270.133416] env[61545]: value = "task-4256885" [ 1270.133416] env[61545]: _type = "Task" [ 1270.133416] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.151370] env[61545]: DEBUG oslo_vmware.api [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.190387] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1270.295277] env[61545]: DEBUG nova.compute.manager [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-vif-plugged-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1270.295572] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.295888] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.296112] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.296503] env[61545]: DEBUG nova.compute.manager [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] No waiting events found dispatching network-vif-plugged-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1270.296737] env[61545]: WARNING nova.compute.manager [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received unexpected event network-vif-plugged-735d1ad5-27dd-48fe-9d11-abc15c2f647b for instance with vm_state building and task_state spawning. [ 1270.296950] env[61545]: DEBUG nova.compute.manager [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1270.297753] env[61545]: DEBUG nova.compute.manager [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing instance network info cache due to event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1270.297753] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.332047] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Creating linked-clone VM from snapshot {{(pid=61545) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1270.335166] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5e552822-fd99-4da9-b51d-71d83f081f87 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.339826] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.781s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.343063] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.531s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.343063] env[61545]: DEBUG nova.objects.instance [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'resources' on Instance uuid c7b7f132-d863-4271-94be-a3c0aaed43fa {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.346454] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1270.346454] env[61545]: value = "task-4256887" [ 1270.346454] env[61545]: _type = "Task" [ 1270.346454] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.358894] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.370627] env[61545]: INFO nova.scheduler.client.report [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance f1243572-2768-4e0f-b3e7-26aa4554d987 [ 1270.377374] env[61545]: DEBUG nova.network.neutron [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.579459] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1270.613348] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1270.613530] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1270.613663] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1270.613800] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1270.613948] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1270.614127] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1270.614345] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1270.614506] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1270.614668] 
env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1270.614827] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1270.615038] env[61545]: DEBUG nova.virt.hardware [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1270.616036] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aa8f83-d4f3-47cd-883b-65135257522c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.624783] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8d50af-839f-4e20-b445-fd4c565c5c34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.649568] env[61545]: DEBUG oslo_vmware.api [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256885, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.861205] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 34%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.879394] env[61545]: DEBUG oslo_concurrency.lockutils [None req-601a5f85-de1c-40b4-9c0c-b65ee81634a3 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "f1243572-2768-4e0f-b3e7-26aa4554d987" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.117s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.880560] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.880843] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Instance network_info: |[{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1270.881357] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.881548] env[61545]: DEBUG nova.network.neutron [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1270.882769] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:c7:64', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '735d1ad5-27dd-48fe-9d11-abc15c2f647b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1270.891080] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1270.894353] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1270.895354] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dd37a54-128d-44de-9a81-39e5b515ce1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.928753] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1270.928753] env[61545]: value = "task-4256888" [ 1270.928753] env[61545]: _type = "Task" [ 1270.928753] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.946124] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256888, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.038896] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c6a93-584c-47f6-b51d-55e84a1b6e06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.047702] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106d149e-472f-4e50-9419-5b747bf5b024 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.080264] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6eeff33-0bdb-41cc-8582-5f3b86414ab2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.088377] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a2a0cc-d6e6-4b7d-a59d-0650d15a1bd0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.103403] env[61545]: DEBUG nova.compute.provider_tree [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.152330] env[61545]: DEBUG oslo_vmware.api [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256885, 'name': PowerOnVM_Task, 'duration_secs': 0.627861} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.152330] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1271.152330] env[61545]: DEBUG nova.compute.manager [None req-fde514a4-6bef-4e41-8082-b82832f31400 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1271.152330] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b55d84-aa56-47fe-9f10-e8ae46bfadff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.255558] env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Successfully updated port: cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1271.359943] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 34%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.439615] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256888, 'name': CreateVM_Task, 'duration_secs': 0.383118} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.441798] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1271.442511] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.442680] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.443010] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1271.443574] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdc1faf7-ebf6-4961-9258-d7f0ce67a5b1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.449413] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1271.449413] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a8dad-7fec-75f2-88c3-0efcf9be2b99" [ 1271.449413] env[61545]: _type = "Task" [ 1271.449413] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.459412] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a8dad-7fec-75f2-88c3-0efcf9be2b99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.607230] env[61545]: DEBUG nova.scheduler.client.report [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.635574] env[61545]: DEBUG nova.network.neutron [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updated VIF entry in instance network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.635893] env[61545]: DEBUG nova.network.neutron [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.758294] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.758442] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.758648] 
env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1271.864731] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 34%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.963565] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]521a8dad-7fec-75f2-88c3-0efcf9be2b99, 'name': SearchDatastore_Task, 'duration_secs': 0.011099} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.963912] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.964169] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1271.964416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.964563] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.964766] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1271.965069] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ec4e30b-1790-4df1-9b33-94a11db93d4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.976692] 
env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1271.976893] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1271.977940] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fd15d83-64b6-4672-b8de-e414f78adadd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.984544] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1271.984544] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283ad0a-4375-6e39-2fdf-0dd49bc25508" [ 1271.984544] env[61545]: _type = "Task" [ 1271.984544] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.994036] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283ad0a-4375-6e39-2fdf-0dd49bc25508, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.113218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.771s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.142034] env[61545]: DEBUG oslo_concurrency.lockutils [req-820559e5-ee93-4fe5-8b7f-42394e6e07b0 req-65781c89-619e-4939-9c9d-e5413167d56e service nova] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.143808] env[61545]: INFO nova.scheduler.client.report [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocations for instance c7b7f132-d863-4271-94be-a3c0aaed43fa [ 1272.179844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "22437c4c-0e0b-4191-b079-3f6b7031656c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.179844] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.335734] env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1272.362359] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.493260] env[61545]: DEBUG nova.compute.manager [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Received event network-vif-plugged-cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1272.493525] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Acquiring lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.493795] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.493901] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.498905] env[61545]: DEBUG nova.compute.manager [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] No waiting events found dispatching network-vif-plugged-cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1272.498905] env[61545]: WARNING nova.compute.manager [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Received unexpected event network-vif-plugged-cf35a0bc-a18e-493b-b59c-794f2b962cd7 for instance with vm_state building and task_state spawning. [ 1272.498905] env[61545]: DEBUG nova.compute.manager [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Received event network-changed-cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1272.498905] env[61545]: DEBUG nova.compute.manager [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Refreshing instance network info cache due to event network-changed-cf35a0bc-a18e-493b-b59c-794f2b962cd7. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1272.499109] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Acquiring lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.508472] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283ad0a-4375-6e39-2fdf-0dd49bc25508, 'name': SearchDatastore_Task, 'duration_secs': 0.013258} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.509308] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d76eebf6-5138-4716-8aef-0f898ec11b44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.515725] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1272.515725] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ebd6c-d2d2-7b15-d930-322b73959b8d" [ 1272.515725] env[61545]: _type = "Task" [ 1272.515725] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.527089] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ebd6c-d2d2-7b15-d930-322b73959b8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.585761] env[61545]: DEBUG nova.network.neutron [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Updating instance_info_cache with network_info: [{"id": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "address": "fa:16:3e:d5:12:1c", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf35a0bc-a1", "ovs_interfaceid": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.653252] env[61545]: DEBUG oslo_concurrency.lockutils [None req-908b90e9-3422-4e23-b2ae-0875ac856cf3 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "c7b7f132-d863-4271-94be-a3c0aaed43fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.437s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.683091] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1272.863535] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.026873] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524ebd6c-d2d2-7b15-d930-322b73959b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.011295} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.027353] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.027494] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c677a1fe-4c95-4142-8f1a-fcc8a21389c6/c677a1fe-4c95-4142-8f1a-fcc8a21389c6.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1273.027806] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba5c10d3-c142-4682-9fe3-bfc5982f3b32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.036396] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1273.036396] env[61545]: value = "task-4256890" [ 1273.036396] env[61545]: _type = "Task" [ 1273.036396] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.047566] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.088714] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.089208] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance network_info: |[{"id": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "address": "fa:16:3e:d5:12:1c", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf35a0bc-a1", "ovs_interfaceid": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1273.089569] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Acquired lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.089765] env[61545]: DEBUG nova.network.neutron [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Refreshing network info cache for port cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.091430] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:12:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf35a0bc-a18e-493b-b59c-794f2b962cd7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1273.099374] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1273.100751] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1273.101118] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bf9d822-ec6f-42d4-ad1f-1578001944dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.128079] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1273.128079] env[61545]: value = "task-4256891" [ 1273.128079] env[61545]: _type = "Task" [ 1273.128079] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.136589] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256891, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.204425] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eda5345-7e06-4f22-a5b5-641ce2097dee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.214571] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Suspending the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1273.214867] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-bcb70051-6ded-484c-924d-301d9442e8d1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.222495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.222791] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.224911] env[61545]: INFO nova.compute.claims [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1273.230266] env[61545]: DEBUG oslo_vmware.api [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1273.230266] env[61545]: value = "task-4256892" [ 
1273.230266] env[61545]: _type = "Task" [ 1273.230266] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.241079] env[61545]: DEBUG oslo_vmware.api [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256892, 'name': SuspendVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.364601] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.549790] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256890, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.639478] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256891, 'name': CreateVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.744093] env[61545]: DEBUG oslo_vmware.api [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256892, 'name': SuspendVM_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.867077] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task} progress is 95%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.048974] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571298} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.049366] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] c677a1fe-4c95-4142-8f1a-fcc8a21389c6/c677a1fe-4c95-4142-8f1a-fcc8a21389c6.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1274.049566] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1274.049800] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ce896c2-1e3d-4195-9395-155afefcd68b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.057495] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1274.057495] env[61545]: value = "task-4256893" [ 1274.057495] env[61545]: _type = "Task" [ 1274.057495] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.069380] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.138040] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256891, 'name': CreateVM_Task, 'duration_secs': 0.800463} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.138277] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1274.139021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.139212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.139667] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1274.139835] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e563d59-04ff-404b-a0fe-546dab672b30 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.145483] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1274.145483] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527872bf-b04b-1aab-8c11-0d362db68e7b" [ 1274.145483] env[61545]: _type = "Task" [ 1274.145483] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.159789] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527872bf-b04b-1aab-8c11-0d362db68e7b, 'name': SearchDatastore_Task, 'duration_secs': 0.011226} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.160167] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.160419] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1274.160685] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.161147] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.161147] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1274.161348] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4476563c-eab8-4f53-afc4-d7024588856b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.170605] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1274.170818] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1274.171666] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4636a5f7-f32b-4b4b-adc6-8a6394479112 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.177366] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1274.177366] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ea0435-8570-e606-c7b0-95bc331c2034" [ 1274.177366] env[61545]: _type = "Task" [ 1274.177366] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.186186] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ea0435-8570-e606-c7b0-95bc331c2034, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.244505] env[61545]: DEBUG oslo_vmware.api [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256892, 'name': SuspendVM_Task, 'duration_secs': 0.9073} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.247024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Suspended the VM {{(pid=61545) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1274.247221] env[61545]: DEBUG nova.compute.manager [None req-02ba269e-8ca8-47ce-83de-19e2f80f4a10 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.248017] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f9eeaf-21c7-4742-a3c7-b7d371c026c8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.365737] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256887, 'name': CloneVM_Task, 'duration_secs': 3.653967} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.366139] env[61545]: INFO nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Created linked-clone VM from snapshot [ 1274.367014] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffa4dd0-fd93-428d-80e4-b4c71d86d88c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.374528] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Uploading image 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1274.408037] env[61545]: DEBUG nova.network.neutron [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Updated VIF entry in instance network info cache for port cf35a0bc-a18e-493b-b59c-794f2b962cd7. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.408456] env[61545]: DEBUG nova.network.neutron [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Updating instance_info_cache with network_info: [{"id": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "address": "fa:16:3e:d5:12:1c", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf35a0bc-a1", "ovs_interfaceid": "cf35a0bc-a18e-493b-b59c-794f2b962cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.411814] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1274.411814] env[61545]: value = "vm-838857" [ 1274.411814] env[61545]: _type = "VirtualMachine" [ 1274.411814] env[61545]: }. 
{{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1274.412156] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9e68ff1a-5e9f-4a07-8173-e76d3b05681e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.423089] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease: (returnval){ [ 1274.423089] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f0de18-b6f0-5538-d536-c27052fdbd19" [ 1274.423089] env[61545]: _type = "HttpNfcLease" [ 1274.423089] env[61545]: } obtained for exporting VM: (result){ [ 1274.423089] env[61545]: value = "vm-838857" [ 1274.423089] env[61545]: _type = "VirtualMachine" [ 1274.423089] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1274.423654] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the lease: (returnval){ [ 1274.423654] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f0de18-b6f0-5538-d536-c27052fdbd19" [ 1274.423654] env[61545]: _type = "HttpNfcLease" [ 1274.423654] env[61545]: } to be ready. {{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1274.433080] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1274.433080] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f0de18-b6f0-5538-d536-c27052fdbd19" [ 1274.433080] env[61545]: _type = "HttpNfcLease" [ 1274.433080] env[61545]: } is initializing. 
{{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1274.442387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c451e8-17f8-4983-af54-a5c07a1304b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.454020] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be23714-897a-4cf3-a03b-56f6f337adf5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.482385] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e6eba7-2066-4bbe-9258-41c148a594a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.490930] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59684154-38dc-47e7-b042-fd67867117e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.507225] env[61545]: DEBUG nova.compute.provider_tree [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.568177] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072184} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.568480] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1274.569374] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229ee4a1-f5a3-418c-ad54-1b1ec93fe750 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.593421] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] c677a1fe-4c95-4142-8f1a-fcc8a21389c6/c677a1fe-4c95-4142-8f1a-fcc8a21389c6.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1274.593919] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7099166-faa5-49f7-8af0-5b777a42d19e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.617474] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1274.617474] env[61545]: value = "task-4256896" [ 1274.617474] env[61545]: _type = "Task" [ 1274.617474] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.626241] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256896, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.689292] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ea0435-8570-e606-c7b0-95bc331c2034, 'name': SearchDatastore_Task, 'duration_secs': 0.009227} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.690466] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3fa6abb-da46-4b71-bafb-b79f27a71e8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.697549] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1274.697549] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283197a-2cc1-57f9-703c-2c6c874f78b3" [ 1274.697549] env[61545]: _type = "Task" [ 1274.697549] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.706624] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283197a-2cc1-57f9-703c-2c6c874f78b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.917926] env[61545]: DEBUG oslo_concurrency.lockutils [req-d319468e-6016-4d85-af04-6dbfae38ec0f req-4780085e-b18a-4fe2-9838-3bd48fc615cc service nova] Releasing lock "refresh_cache-5e047505-5466-4d89-bdd0-ebe6ac7fef0c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.932786] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1274.932786] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f0de18-b6f0-5538-d536-c27052fdbd19" [ 1274.932786] env[61545]: _type = "HttpNfcLease" [ 1274.932786] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1274.933079] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1274.933079] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f0de18-b6f0-5538-d536-c27052fdbd19" [ 1274.933079] env[61545]: _type = "HttpNfcLease" [ 1274.933079] env[61545]: }. {{(pid=61545) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1274.933826] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c822b47-c532-47c5-836a-830c563d562b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.943852] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk from lease info. 
{{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1274.943852] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk for reading. {{(pid=61545) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1275.010354] env[61545]: DEBUG nova.scheduler.client.report [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1275.032418] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8014a8d1-5cbb-40d1-8812-65688bbfb868 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.127568] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256896, 'name': ReconfigVM_Task, 'duration_secs': 0.293148} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.128424] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfigured VM instance instance-00000072 to attach disk [datastore2] c677a1fe-4c95-4142-8f1a-fcc8a21389c6/c677a1fe-4c95-4142-8f1a-fcc8a21389c6.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1275.130020] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3833195d-720d-4234-8258-360254288148 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.139033] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1275.139033] env[61545]: value = "task-4256897" [ 1275.139033] env[61545]: _type = "Task" [ 1275.139033] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.147150] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256897, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.210042] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5283197a-2cc1-57f9-703c-2c6c874f78b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010633} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.210418] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.210585] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1275.210925] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22ff377a-6c6a-427b-92ad-f9e75612f7fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.218489] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1275.218489] env[61545]: value = "task-4256898" [ 1275.218489] env[61545]: _type = "Task" [ 1275.218489] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.227446] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256898, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.515730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.516450] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1275.653310] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256897, 'name': Rename_Task, 'duration_secs': 0.155604} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.653806] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1275.654018] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e676442-04dc-4c74-9dc2-3538fa18c42f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.663838] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1275.663838] env[61545]: value = "task-4256899" [ 1275.663838] env[61545]: _type = "Task" [ 1275.663838] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.675560] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.730826] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256898, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.022679] env[61545]: DEBUG nova.compute.utils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1276.024716] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1276.025073] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1276.028120] env[61545]: INFO nova.compute.manager [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Resuming [ 1276.028825] env[61545]: DEBUG nova.objects.instance [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'flavor' on Instance uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1276.129490] env[61545]: DEBUG nova.policy [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1276.176113] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256899, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.230520] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554187} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.230802] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1276.231074] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1276.231371] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8425c710-ef6a-4c59-a7f8-78ecaac2161f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.240871] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1276.240871] env[61545]: value = "task-4256900" [ 1276.240871] env[61545]: _type = "Task" [ 1276.240871] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.252037] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256900, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.422874] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Successfully created port: a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1276.529274] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1276.676380] env[61545]: DEBUG oslo_vmware.api [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256899, 'name': PowerOnVM_Task, 'duration_secs': 0.60127} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.676741] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1276.677099] env[61545]: INFO nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1276.677398] env[61545]: DEBUG nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1276.678385] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a800b70-1786-4358-bba1-d97eb77d84d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.752486] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076219} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.753116] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1276.754132] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57bfc32-47f3-4da0-9999-66eef9abe988 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.779312] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.780019] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cdb84a4-a184-4c92-bcb5-2f2460345e41 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.804641] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1276.804641] env[61545]: value = "task-4256902" [ 1276.804641] 
env[61545]: _type = "Task" [ 1276.804641] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.815441] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256902, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.198084] env[61545]: INFO nova.compute.manager [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Took 13.75 seconds to build instance. [ 1277.314714] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256902, 'name': ReconfigVM_Task, 'duration_secs': 0.500494} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.315017] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.315682] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56b02ce2-c3e4-498b-80a8-bf0d03e6cbf6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.324135] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1277.324135] env[61545]: value = "task-4256903" [ 1277.324135] env[61545]: _type = "Task" [ 1277.324135] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.335492] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256903, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.543407] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1277.545919] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.546205] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquired lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.546450] env[61545]: DEBUG nova.network.neutron [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1277.578482] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1277.578723] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.578897] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1277.579234] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.579535] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1277.579639] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 
tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1277.579891] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1277.580120] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1277.580441] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1277.580567] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1277.580785] env[61545]: DEBUG nova.virt.hardware [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1277.582069] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c83382-5b35-42ef-a6dd-0aa70203a845 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.591088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb471e5-f353-4168-aed1-5c8bbe00ab60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.700544] env[61545]: DEBUG oslo_concurrency.lockutils [None req-fff8b99f-4c5b-48dd-b479-b3f8ef0823f0 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.262s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.836925] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256903, 'name': Rename_Task, 'duration_secs': 0.191912} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.837185] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1277.837511] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bb96a46-7e62-4217-a356-8e3f8f44a5c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.847370] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1277.847370] env[61545]: value = "task-4256904" [ 1277.847370] env[61545]: _type = "Task" [ 1277.847370] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.860761] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.279873] env[61545]: DEBUG nova.compute.manager [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Received event network-vif-plugged-a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1278.280747] env[61545]: DEBUG oslo_concurrency.lockutils [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] Acquiring lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.281914] env[61545]: DEBUG oslo_concurrency.lockutils [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.281914] env[61545]: DEBUG oslo_concurrency.lockutils [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.281914] env[61545]: DEBUG nova.compute.manager [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] No waiting events found dispatching network-vif-plugged-a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1278.281914] env[61545]: WARNING 
nova.compute.manager [req-fa317117-75a7-45dc-baf8-e720ecef8014 req-1bdcd09f-aac6-4d72-83c7-198a663acdc1 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Received unexpected event network-vif-plugged-a5c32d4d-c669-44cf-891b-60956afffddd for instance with vm_state building and task_state spawning. [ 1278.302241] env[61545]: DEBUG nova.compute.manager [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1278.302546] env[61545]: DEBUG nova.compute.manager [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing instance network info cache due to event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1278.302815] env[61545]: DEBUG oslo_concurrency.lockutils [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.303017] env[61545]: DEBUG oslo_concurrency.lockutils [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.303240] env[61545]: DEBUG nova.network.neutron [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1278.358495] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256904, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.497962] env[61545]: DEBUG nova.network.neutron [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [{"id": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "address": "fa:16:3e:b0:54:e7", "network": {"id": "96b02d05-dca0-4f69-8812-b6215a881ccb", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1974724146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa2360863a5f4eff8a88eca0c88fa76d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd4ffa1-26", "ovs_interfaceid": "4bd4ffa1-26c1-49ba-b154-0c04a3294c9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.861509] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256904, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.914857] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Successfully updated port: a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.000523] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Releasing lock "refresh_cache-fde00c6e-29b5-4b99-944a-c0404e4f2fae" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.001639] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6f8726-bc18-4f78-93f5-1ed21a69477c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.010238] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Resuming the VM {{(pid=61545) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1279.010517] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec0e62e8-588c-411e-b4c8-2ea3fa7ae2d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.018439] env[61545]: DEBUG oslo_vmware.api [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1279.018439] env[61545]: value = "task-4256905" [ 1279.018439] env[61545]: _type = "Task" [ 1279.018439] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.028522] env[61545]: DEBUG oslo_vmware.api [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256905, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.127674] env[61545]: DEBUG nova.network.neutron [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1279.127674] env[61545]: DEBUG nova.network.neutron [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.358927] env[61545]: DEBUG oslo_vmware.api [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256904, 'name': PowerOnVM_Task, 'duration_secs': 1.051006} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.359370] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.359583] env[61545]: INFO nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Took 8.78 seconds to spawn the instance on the hypervisor. 
[ 1279.359765] env[61545]: DEBUG nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1279.360628] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c4b6c8-6ea5-4fb3-8847-4d6ee8f3ee23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.419340] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.419533] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.419691] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1279.535784] env[61545]: DEBUG oslo_vmware.api [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256905, 'name': PowerOnVM_Task} progress is 93%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.629474] env[61545]: DEBUG oslo_concurrency.lockutils [req-15fd64e0-e21b-4d28-b617-5371217f4351 req-0e44dc74-2e69-46be-a2ba-3642a292172d service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.778648] env[61545]: DEBUG nova.compute.manager [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1279.778648] env[61545]: DEBUG nova.compute.manager [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing instance network info cache due to event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1279.778648] env[61545]: DEBUG oslo_concurrency.lockutils [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.778648] env[61545]: DEBUG oslo_concurrency.lockutils [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.778648] env[61545]: DEBUG nova.network.neutron [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1279.889754] env[61545]: INFO nova.compute.manager [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Took 14.14 seconds to build instance. [ 1279.953467] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1280.029973] env[61545]: DEBUG oslo_vmware.api [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256905, 'name': PowerOnVM_Task, 'duration_secs': 0.725477} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.030263] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Resumed the VM {{(pid=61545) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1280.030470] env[61545]: DEBUG nova.compute.manager [None req-4b9f2837-d9ca-4a3a-9c39-b45042ea10f8 tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1280.031291] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b226c385-d78d-444f-8172-8956fbd586d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.097903] env[61545]: DEBUG nova.network.neutron [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Updating instance_info_cache with network_info: [{"id": "a5c32d4d-c669-44cf-891b-60956afffddd", "address": "fa:16:3e:3b:3f:e3", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5c32d4d-c6", "ovs_interfaceid": "a5c32d4d-c669-44cf-891b-60956afffddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.341883] env[61545]: DEBUG nova.compute.manager [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1280.342233] env[61545]: DEBUG nova.compute.manager [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing instance network info cache due to event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1280.342451] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.393257] env[61545]: DEBUG oslo_concurrency.lockutils [None req-dc1db067-e6a0-40d8-badc-562eb33b58a6 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.659s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.600534] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.600953] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Instance network_info: |[{"id": "a5c32d4d-c669-44cf-891b-60956afffddd", "address": "fa:16:3e:3b:3f:e3", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5c32d4d-c6", "ovs_interfaceid": "a5c32d4d-c669-44cf-891b-60956afffddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1280.601575] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:3f:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5c32d4d-c669-44cf-891b-60956afffddd', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.610640] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 
tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1280.611912] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1280.612286] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0be550eb-3313-47c3-b734-12fa2e03efdb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.633420] env[61545]: DEBUG nova.network.neutron [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updated VIF entry in instance network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1280.634075] env[61545]: DEBUG nova.network.neutron [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.642362] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.642362] env[61545]: value = "task-4256907" [ 1280.642362] env[61545]: _type = "Task" [ 1280.642362] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.657714] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256907, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.137651] env[61545]: DEBUG oslo_concurrency.lockutils [req-4683533c-0f6e-4f2d-8be5-42342cbdba62 req-60442456-9d41-46bf-b3d0-bf8a6cfbe4dd service nova] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.138118] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.138320] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1281.153646] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256907, 'name': CreateVM_Task, 'duration_secs': 0.432889} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.153820] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1281.154519] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.154688] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.155022] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1281.155290] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2133ed2e-574e-4c52-b480-f3c979dcdc2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.160932] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1281.160932] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d316f4-1d5c-fd21-d2ac-1af83dd3dcfa" [ 1281.160932] env[61545]: _type = "Task" [ 1281.160932] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.170856] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d316f4-1d5c-fd21-d2ac-1af83dd3dcfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.341101] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.341101] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.341354] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.341482] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.341640] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.343897] env[61545]: INFO nova.compute.manager [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Terminating instance [ 1281.432315] env[61545]: INFO nova.compute.manager [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Rebuilding instance [ 1281.482351] env[61545]: DEBUG nova.compute.manager [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1281.483742] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5952b158-fe84-49c3-9b76-7e16795033d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.673865] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d316f4-1d5c-fd21-d2ac-1af83dd3dcfa, 'name': SearchDatastore_Task, 'duration_secs': 0.016747} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.673865] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.674095] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.674387] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.674571] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.674761] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.675063] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e855470c-c37c-47d3-ac89-4707de7c6202 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.686400] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.686625] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1281.687453] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c83107d-94d9-4f52-b841-d70616c77666 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.694641] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1281.694641] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e2df93-adf8-9af3-3313-b47cc521a92e" [ 1281.694641] env[61545]: _type = "Task" [ 1281.694641] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.708547] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e2df93-adf8-9af3-3313-b47cc521a92e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.809655] env[61545]: DEBUG nova.compute.manager [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1281.809845] env[61545]: DEBUG nova.compute.manager [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing instance network info cache due to event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1281.810415] env[61545]: DEBUG oslo_concurrency.lockutils [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.810675] env[61545]: DEBUG oslo_concurrency.lockutils [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.810947] env[61545]: DEBUG nova.network.neutron [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1281.846954] env[61545]: DEBUG nova.compute.manager [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1281.847222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1281.848144] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c0c34c-7208-49fd-81af-ab7bd307829d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.857184] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1281.857473] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76194976-cd50-49f8-9044-8d48a875b952 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.864976] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1281.864976] env[61545]: value = "task-4256908" [ 1281.864976] env[61545]: _type = "Task" [ 1281.864976] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.874884] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256908, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.932722] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updated VIF entry in instance network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1281.932978] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.207599] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e2df93-adf8-9af3-3313-b47cc521a92e, 'name': SearchDatastore_Task, 'duration_secs': 0.015478} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.208681] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1685d00a-a1bb-469b-bf39-93f9cdd95e35 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.215553] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1282.215553] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d92a6-2d97-d217-5054-60517e4d90e9" [ 1282.215553] env[61545]: _type = "Task" [ 1282.215553] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.224558] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d92a6-2d97-d217-5054-60517e4d90e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.376797] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256908, 'name': PowerOffVM_Task, 'duration_secs': 0.213696} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.377115] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1282.377288] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1282.377550] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d97e7ccd-20ed-4fff-9978-d094a35fce71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.435481] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.435898] env[61545]: DEBUG nova.compute.manager [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Received event network-changed-a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1282.435943] env[61545]: DEBUG nova.compute.manager [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Refreshing instance network info cache due to event network-changed-a5c32d4d-c669-44cf-891b-60956afffddd. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1282.436159] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Acquiring lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.436327] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Acquired lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.436512] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Refreshing network info cache for port a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.456920] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1282.457267] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1282.457581] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleting the datastore file [datastore2] fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.458071] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9b14933-3680-4a7a-9889-6fc9e8a4a724 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.467100] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for the task: (returnval){ [ 1282.467100] env[61545]: value = "task-4256910" [ 1282.467100] env[61545]: _type = "Task" [ 1282.467100] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.480781] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256910, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.502896] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1282.503269] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8fd2c05-e5b4-462a-babf-69b6c7923a81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.514529] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1282.514529] env[61545]: value = "task-4256911" [ 1282.514529] env[61545]: _type = "Task" [ 1282.514529] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.527315] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.543793] env[61545]: DEBUG nova.network.neutron [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1282.544345] env[61545]: DEBUG nova.network.neutron [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.727652] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525d92a6-2d97-d217-5054-60517e4d90e9, 'name': SearchDatastore_Task, 'duration_secs': 0.018194} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.727921] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.728241] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 22437c4c-0e0b-4191-b079-3f6b7031656c/22437c4c-0e0b-4191-b079-3f6b7031656c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1282.728564] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1d2fd70-0fc0-4905-a253-a29193f29321 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.738130] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1282.738130] env[61545]: value = "task-4256913" [ 1282.738130] env[61545]: _type = "Task" [ 1282.738130] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.748027] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.979858] env[61545]: DEBUG oslo_vmware.api [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Task: {'id': task-4256910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302235} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.980141] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.980330] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1282.980506] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1282.980680] env[61545]: INFO nova.compute.manager [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1282.980919] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1282.981165] env[61545]: DEBUG nova.compute.manager [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1282.981281] env[61545]: DEBUG nova.network.neutron [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1283.024977] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256911, 'name': PowerOffVM_Task, 'duration_secs': 0.26291} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.027233] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1283.027479] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1283.028351] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056260c8-8183-4933-8d51-ff409b5037d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.041197] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1283.041495] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c05e8bb5-6af8-43c5-9afa-02a2468f2a77 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.046837] env[61545]: DEBUG oslo_concurrency.lockutils [req-6efb887e-49f7-457c-a281-80751232df3a req-188e3c90-efeb-4344-aafd-4bc184c109a0 service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.151316] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1283.151316] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1283.151316] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.151316] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38f88d24-729b-4fae-810c-1d4eed8117ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.157275] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 
tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1283.157275] env[61545]: value = "task-4256915" [ 1283.157275] env[61545]: _type = "Task" [ 1283.157275] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.167735] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.250137] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256913, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.362354] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Updated VIF entry in instance network info cache for port a5c32d4d-c669-44cf-891b-60956afffddd. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.362777] env[61545]: DEBUG nova.network.neutron [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Updating instance_info_cache with network_info: [{"id": "a5c32d4d-c669-44cf-891b-60956afffddd", "address": "fa:16:3e:3b:3f:e3", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5c32d4d-c6", "ovs_interfaceid": "a5c32d4d-c669-44cf-891b-60956afffddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.668185] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.747305] env[61545]: DEBUG nova.compute.manager [req-4f2e385f-7298-4f96-be7d-93d4b4c30870 req-951e252e-7f23-43a6-a1cd-e0bffaa01cb2 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Received event network-vif-deleted-4bd4ffa1-26c1-49ba-b154-0c04a3294c9d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1283.747305] env[61545]: INFO nova.compute.manager [req-4f2e385f-7298-4f96-be7d-93d4b4c30870 req-951e252e-7f23-43a6-a1cd-e0bffaa01cb2 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Neutron deleted interface 4bd4ffa1-26c1-49ba-b154-0c04a3294c9d; detaching it from the instance and deleting it from the info cache [ 1283.747506] env[61545]: DEBUG nova.network.neutron [req-4f2e385f-7298-4f96-be7d-93d4b4c30870 req-951e252e-7f23-43a6-a1cd-e0bffaa01cb2 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.752182] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727394} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.753019] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 22437c4c-0e0b-4191-b079-3f6b7031656c/22437c4c-0e0b-4191-b079-3f6b7031656c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1283.753236] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1283.753523] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efd58f66-7537-4c11-88fd-9cafa17af5d3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.761858] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1283.761858] env[61545]: value = "task-4256916" [ 1283.761858] env[61545]: _type = "Task" [ 1283.761858] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.774202] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256916, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.866699] env[61545]: DEBUG oslo_concurrency.lockutils [req-a76026eb-fd43-43b3-8066-51b1423f4f3d req-1f7b43d5-99d3-480a-bdea-41cc82020247 service nova] Releasing lock "refresh_cache-22437c4c-0e0b-4191-b079-3f6b7031656c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.170455] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.548908} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.170659] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.170842] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1284.171028] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1284.223933] env[61545]: DEBUG nova.network.neutron [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.253732] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6913bcce-5aef-4d1c-b8ab-b2edf0b7ce80 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.269364] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd5b8a3-a134-44e7-a67e-b6bd1a8a8360 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.286823] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120335} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.287133] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1284.287979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6cc67c-2b85-4eb4-bf56-16cf74a6bbd7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.302574] env[61545]: DEBUG nova.compute.manager [req-4f2e385f-7298-4f96-be7d-93d4b4c30870 req-951e252e-7f23-43a6-a1cd-e0bffaa01cb2 service nova] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Detach interface failed, port_id=4bd4ffa1-26c1-49ba-b154-0c04a3294c9d, reason: Instance fde00c6e-29b5-4b99-944a-c0404e4f2fae could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1284.320739] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 22437c4c-0e0b-4191-b079-3f6b7031656c/22437c4c-0e0b-4191-b079-3f6b7031656c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1284.321524] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a752d8eb-22a9-42c3-829e-ce2d96746346 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.344017] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1284.344017] env[61545]: value = "task-4256917" [ 1284.344017] env[61545]: _type = "Task" [ 1284.344017] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.354474] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256917, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.726588] env[61545]: INFO nova.compute.manager [-] [instance: fde00c6e-29b5-4b99-944a-c0404e4f2fae] Took 1.75 seconds to deallocate network for instance. [ 1284.857287] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256917, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.194226] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1285.195155] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bac7b7a-4183-487a-bac7-bd18e1e3b66e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.205759] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1285.205918] env[61545]: ERROR oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk due to incomplete transfer. [ 1285.208133] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1285.208367] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.208525] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1285.208713] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.208844] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1285.208987] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1285.209230] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1285.209402] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1285.209571] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1285.209730] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1285.209899] env[61545]: DEBUG nova.virt.hardware [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1285.210176] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bc5bf73c-6c01-49e4-b4d3-ff4a9f782acb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.212360] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff2c715-8681-473d-a16d-915c116e8aa1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.220912] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b230bf5e-29d4-4770-a2e1-69b6e8b2a642 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.225634] env[61545]: DEBUG oslo_vmware.rw_handles [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 
tempest-ServerActionsTestOtherB-1077210391-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804eb0-516f-1631-44f1-98ddd87f8f44/disk-0.vmdk. {{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1285.225820] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Uploaded image 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c to the Glance image server {{(pid=61545) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1285.228123] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Destroying the VM {{(pid=61545) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1285.228741] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5ab64216-8249-40ae-b4a3-8bf55fd68484 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.240380] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.240667] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.240956] env[61545]: DEBUG nova.objects.instance [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lazy-loading 'resources' on Instance uuid fde00c6e-29b5-4b99-944a-c0404e4f2fae {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1285.242522] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:12:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf35a0bc-a18e-493b-b59c-794f2b962cd7', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.250624] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.253319] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1285.253670] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1285.253670] env[61545]: value = "task-4256919" [ 1285.253670] env[61545]: _type = "Task" [ 1285.253670] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.253866] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40526166-a39a-4c78-8447-70bf3cd3c3b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.278919] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256919, 'name': Destroy_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.280359] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.280359] env[61545]: value = "task-4256920" [ 1285.280359] env[61545]: _type = "Task" [ 1285.280359] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.289129] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256920, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.356976] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256917, 'name': ReconfigVM_Task, 'duration_secs': 0.597302} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.357248] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 22437c4c-0e0b-4191-b079-3f6b7031656c/22437c4c-0e0b-4191-b079-3f6b7031656c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1285.357938] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35eae1b1-d308-4ff0-8611-253ccfe193e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.366029] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1285.366029] env[61545]: value = "task-4256921" [ 1285.366029] env[61545]: _type = "Task" [ 1285.366029] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.375942] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256921, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.781627] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256919, 'name': Destroy_Task, 'duration_secs': 0.400231} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.784765] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Destroyed the VM [ 1285.785450] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleting Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1285.785450] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c8ceff37-0697-4d03-897e-6c5b0fc40ba6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.793479] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256920, 'name': CreateVM_Task, 'duration_secs': 0.506433} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.794836] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1285.795263] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1285.795263] env[61545]: value = "task-4256922" [ 1285.795263] env[61545]: _type = "Task" [ 1285.795263] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.796107] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.796346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.796716] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1285.797053] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aef0156-9ec3-45bf-be64-3a6d1cbbeaf4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.804843] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1285.804843] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5239b544-5551-0279-40a5-5af42ed3fb5f" [ 1285.804843] env[61545]: _type = "Task" [ 1285.804843] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.808050] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256922, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.819356] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5239b544-5551-0279-40a5-5af42ed3fb5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.876851] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256921, 'name': Rename_Task, 'duration_secs': 0.252141} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.877200] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1285.877523] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfea233b-122b-4127-b333-867686570317 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.884872] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1285.884872] env[61545]: value = "task-4256923" [ 1285.884872] env[61545]: _type = "Task" [ 1285.884872] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.894693] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256923, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.924752] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c22da3d-20b6-4f2a-b1ac-795e9fd5f1b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.932835] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350044d3-c05c-401f-bbd9-64a41b1bb98a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.965670] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd895ba-e5d9-4e61-9f70-6643bdef026e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.974563] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13e8ff9-c6bb-4d7f-9f52-ee3f394dface {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.989501] env[61545]: DEBUG nova.compute.provider_tree [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1286.309566] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256922, 'name': RemoveSnapshot_Task, 'duration_secs': 0.503036} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.313079] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleted Snapshot of the VM instance {{(pid=61545) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1286.313490] env[61545]: DEBUG nova.compute.manager [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.314323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba8b61d-0ed5-4f5c-9efd-8259eda0e2f3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.325758] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5239b544-5551-0279-40a5-5af42ed3fb5f, 'name': SearchDatastore_Task, 'duration_secs': 0.022154} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.328021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.328318] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.328580] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.328729] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.328914] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.331732] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-972260c0-71fc-426f-94f8-4325894be5f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.344891] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.345165] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1286.346087] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ab8bc7-9057-47cc-82f9-ac2bcf2dcd6c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.353049] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1286.353049] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5252a2c7-67ee-bb04-c9f3-5d1cb3f9e6fc" [ 1286.353049] env[61545]: _type = "Task" [ 1286.353049] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.362269] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5252a2c7-67ee-bb04-c9f3-5d1cb3f9e6fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.395226] env[61545]: DEBUG oslo_vmware.api [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256923, 'name': PowerOnVM_Task, 'duration_secs': 0.4787} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.395541] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.395736] env[61545]: INFO nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Took 8.85 seconds to spawn the instance on the hypervisor. 
[ 1286.395915] env[61545]: DEBUG nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.396830] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0734f19b-17ca-49ce-8097-e311518d7e9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.492944] env[61545]: DEBUG nova.scheduler.client.report [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1286.838314] env[61545]: INFO nova.compute.manager [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Shelve offloading [ 1286.865210] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5252a2c7-67ee-bb04-c9f3-5d1cb3f9e6fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010718} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.866201] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d73fca39-2408-4053-9eb1-2a4dcc663647 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.872766] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1286.872766] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523a9d63-56d7-b33c-1275-8a2147cc8f03" [ 1286.872766] env[61545]: _type = "Task" [ 1286.872766] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.881922] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523a9d63-56d7-b33c-1275-8a2147cc8f03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.914481] env[61545]: INFO nova.compute.manager [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Took 13.72 seconds to build instance. [ 1286.997961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.025133] env[61545]: INFO nova.scheduler.client.report [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Deleted allocations for instance fde00c6e-29b5-4b99-944a-c0404e4f2fae [ 1287.343295] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1287.343617] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4c3c3d4-5fbb-4a42-b743-1b20175de2fa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.352532] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1287.352532] env[61545]: value = "task-4256924" [ 1287.352532] env[61545]: _type = "Task" [ 1287.352532] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.363286] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] VM already powered off {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1287.363286] env[61545]: DEBUG nova.compute.manager [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1287.364189] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7ec32c-c322-467f-b8a5-05e29b38befb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.372500] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.372808] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.373104] env[61545]: DEBUG nova.network.neutron [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1287.384339] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523a9d63-56d7-b33c-1275-8a2147cc8f03, 'name': SearchDatastore_Task, 'duration_secs': 0.016944} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.385287] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.385546] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1287.385799] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d1168bf-4569-4a13-b351-91c43cc90799 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.393320] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1287.393320] env[61545]: value = "task-4256925" [ 1287.393320] env[61545]: _type = "Task" [ 1287.393320] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.401783] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256925, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.415848] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ff0f141c-d708-482e-b9d5-10f1e0ea6542 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.236s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.534032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ec8a9ce0-992a-457e-b23e-fc269aa46f8a tempest-ServerActionsTestJSON-1581508737 tempest-ServerActionsTestJSON-1581508737-project-member] Lock "fde00c6e-29b5-4b99-944a-c0404e4f2fae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.193s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.862588] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.862917] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.906553] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48696} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.906879] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1287.907109] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1287.907385] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59e69fa9-9de4-4c74-bccb-e0f3dd931cca {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.918441] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1287.918441] env[61545]: value = "task-4256926" [ 1287.918441] env[61545]: _type = "Task" [ 1287.918441] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.930363] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256926, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.973416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.973714] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.973933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.974134] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.974317] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.976850] env[61545]: INFO nova.compute.manager [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Terminating instance [ 1288.048766] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.049032] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.205798] env[61545]: DEBUG nova.network.neutron [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.365848] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1288.430813] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067115} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.431545] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1288.431979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a43a37-ec97-4044-a895-e3ffd57a359b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.457074] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1288.457074] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78e8a3fe-6c32-4d01-b4a2-6afe01d837be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.478873] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1288.478873] env[61545]: value = "task-4256927" [ 1288.478873] env[61545]: _type = "Task" [ 1288.478873] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.482901] env[61545]: DEBUG nova.compute.manager [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1288.483134] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.483923] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa497a3-dc9c-4581-88e7-e8bafb814659 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.491761] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.495473] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70890163-4b2b-4f59-b9a8-87b005a92222 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.497597] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256927, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.505213] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1288.505213] env[61545]: value = "task-4256928" [ 1288.505213] env[61545]: _type = "Task" [ 1288.505213] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.514629] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.555865] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1288.709214] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.892976] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.893366] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.895072] env[61545]: INFO nova.compute.claims [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1288.988057] env[61545]: DEBUG nova.compute.manager [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-vif-unplugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1288.988300] env[61545]: DEBUG oslo_concurrency.lockutils [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.988510] env[61545]: DEBUG oslo_concurrency.lockutils [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.989622] env[61545]: DEBUG oslo_concurrency.lockutils [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.989955] env[61545]: DEBUG nova.compute.manager [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] No waiting events found dispatching network-vif-unplugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1288.990160] 
env[61545]: WARNING nova.compute.manager [req-2366d30f-5183-41c6-9eae-75e836b81c59 req-10f4619c-c757-40b5-a83f-a72ffadba4b5 service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received unexpected event network-vif-unplugged-ff62e994-2e58-433b-884f-5b4fa7639d6b for instance with vm_state shelved and task_state shelving_offloading. [ 1288.998083] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.014701] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256928, 'name': PowerOffVM_Task, 'duration_secs': 0.239867} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.014701] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1289.014944] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1289.015122] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-220b6725-535f-4b1c-8b06-9660c531c634 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.082206] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.082534] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.082728] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleting the datastore file [datastore2] bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.083036] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c697f929-a94c-43eb-a7be-6280ce76d722 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.090061] 
env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.091723] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for the task: (returnval){ [ 1289.091723] env[61545]: value = "task-4256930" [ 1289.091723] env[61545]: _type = "Task" [ 1289.091723] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.096261] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1289.097375] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a80809e-ecfe-432e-968a-b98105ef8c3d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.105607] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256930, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.108252] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1289.108252] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9b24eeb-7e3b-43c3-8a70-0924b36c5cfa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.182384] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.183099] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.183357] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleting the datastore file [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.183679] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53f576a9-8680-4f9e-b820-0beaa388f97c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.192776] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1289.192776] env[61545]: value = "task-4256932" [ 1289.192776] env[61545]: _type = "Task" [ 1289.192776] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.203054] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.490671] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256927, 'name': ReconfigVM_Task, 'duration_secs': 0.784399} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.490979] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c/5e047505-5466-4d89-bdd0-ebe6ac7fef0c.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.491687] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdb04c33-f599-4598-9c05-78bbc7aafaea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.498426] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1289.498426] env[61545]: value = "task-4256933" [ 1289.498426] env[61545]: _type = "Task" [ 1289.498426] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.511363] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256933, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.608192] env[61545]: DEBUG oslo_vmware.api [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Task: {'id': task-4256930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156587} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.608192] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.608192] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.608192] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.608192] env[61545]: INFO nova.compute.manager [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1289.608192] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1289.608699] env[61545]: DEBUG nova.compute.manager [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1289.608896] env[61545]: DEBUG nova.network.neutron [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.703522] env[61545]: DEBUG oslo_vmware.api [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164246} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.703804] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.703989] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.704791] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.742605] env[61545]: INFO nova.scheduler.client.report [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted allocations for instance 40bade64-b16b-4a33-a9ea-18f80a32c6bc [ 1290.011377] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256933, 'name': Rename_Task, 'duration_secs': 0.328424} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.011886] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.013097] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af498bb3-c39c-4b1a-8b24-d0fd804712c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.019296] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1290.019296] env[61545]: value = "task-4256934" [ 1290.019296] env[61545]: _type = "Task" [ 1290.019296] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.032425] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.072329] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f15aa6-77d2-4178-b4fa-ea6cd34d1ba2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.080037] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edba4de-5df3-458d-a917-b9c0ccc39750 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.114030] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a310ea-0338-46e5-ba95-08506f340749 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.122053] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5368473c-e795-44ad-8891-7162fff654f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.137192] env[61545]: DEBUG nova.compute.provider_tree [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.248212] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.533664] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 
tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256934, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.640987] env[61545]: DEBUG nova.scheduler.client.report [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.756452] env[61545]: DEBUG nova.network.neutron [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.035246] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256934, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.077348] env[61545]: DEBUG nova.compute.manager [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1291.077538] env[61545]: DEBUG nova.compute.manager [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing instance network info cache due to event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1291.077752] env[61545]: DEBUG oslo_concurrency.lockutils [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.077898] env[61545]: DEBUG oslo_concurrency.lockutils [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.078075] env[61545]: DEBUG nova.network.neutron [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1291.147087] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.147812] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1291.150504] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.061s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.152091] env[61545]: INFO nova.compute.claims [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.259046] env[61545]: INFO nova.compute.manager [-] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Took 1.65 seconds to deallocate network for instance. [ 1291.533825] env[61545]: DEBUG oslo_vmware.api [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256934, 'name': PowerOnVM_Task, 'duration_secs': 1.309479} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.534114] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1291.534332] env[61545]: DEBUG nova.compute.manager [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1291.535183] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2ec1c9-af2f-4fdd-b193-b02ae9566dd9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.656933] env[61545]: DEBUG nova.compute.utils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1291.662023] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1291.662023] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1291.732429] env[61545]: DEBUG nova.policy [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1291.767732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.926323] env[61545]: DEBUG nova.network.neutron [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updated VIF entry in instance network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1291.926323] env[61545]: DEBUG nova.network.neutron [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapff62e994-2e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.058788] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.092876] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.093175] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.093550] env[61545]: DEBUG nova.objects.instance [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.109997] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Successfully created port: 247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1292.164031] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1292.359985] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bf0d42-2d8d-47ed-8166-63901b479c6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.372420] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05aae8f0-664a-493b-b841-95cf9110f932 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.376305] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.408923] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc72cab0-9f8d-4f6f-a284-b03083f15812 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.422430] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df44aab-aeef-4cc5-b950-f7c82f24d265 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.436179] env[61545]: DEBUG oslo_concurrency.lockutils [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.436484] env[61545]: DEBUG nova.compute.manager [req-60b1b304-d3a3-45db-b1f1-466f9a4c6555 req-5635859d-28e2-4f04-b31a-7e74ee7000cf service nova] [instance: bd259162-c8ea-4408-9b7c-c91b9fbfc0d2] Received event network-vif-deleted-e06d5cf8-450e-488f-8ba6-9e7d62811ba1 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1292.437108] env[61545]: DEBUG nova.compute.provider_tree [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.748283] env[61545]: DEBUG nova.objects.instance [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.941950] env[61545]: DEBUG nova.scheduler.client.report [None req-084910e2-6e4f-4f61-9687-89d060068b39 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.175240] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1293.204509] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1293.204768] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1293.204924] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1293.205130] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1293.205288] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1293.205442] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1293.205644] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1293.205802] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1293.205968] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1293.206147] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1293.206326] env[61545]: DEBUG nova.virt.hardware [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1293.207240] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d973d0-a552-4ae1-bb48-3eaee6e6e75c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.216886] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca897c9-fa04-4262-9771-5d57da4cbec4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.253238] env[61545]: DEBUG nova.objects.base [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1293.253479] env[61545]: DEBUG nova.network.neutron [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1293.449111] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.298s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.449111] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 
tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1293.451931] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.204s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.452163] env[61545]: DEBUG nova.objects.instance [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'resources' on Instance uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.483276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.483276] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.483455] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.483681] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.483862] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.487288] env[61545]: DEBUG nova.policy [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1293.489960] env[61545]: INFO nova.compute.manager [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Terminating instance [ 1293.956678] env[61545]: DEBUG nova.compute.utils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1293.957531] env[61545]: DEBUG nova.objects.instance [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'numa_topology' on Instance uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.961917] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.961917] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1293.994434] env[61545]: DEBUG nova.compute.manager [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1293.994711] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1293.995709] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc87d24-1f22-493d-9abe-07313c6a27aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.006380] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1294.006752] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1798c347-dced-4a7a-ac5c-600ab481758f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.015228] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1294.015228] env[61545]: value = "task-4256935" [ 1294.015228] env[61545]: _type = "Task" [ 1294.015228] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.025321] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256935, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.114120] env[61545]: DEBUG nova.policy [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82de8ada56cd46319fe4c7ecd4957abb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da39b1ee6df640b89a9dab58e3380397', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1294.288561] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Successfully updated port: 247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1294.452086] env[61545]: DEBUG nova.compute.manager [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Received event network-vif-plugged-247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1294.453192] env[61545]: DEBUG oslo_concurrency.lockutils [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] Acquiring lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.453416] env[61545]: DEBUG oslo_concurrency.lockutils [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.453658] env[61545]: DEBUG oslo_concurrency.lockutils [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.453940] env[61545]: DEBUG nova.compute.manager [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] No waiting events found dispatching network-vif-plugged-247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1294.454012] env[61545]: WARNING nova.compute.manager [req-e303ed7c-a307-4113-8a52-fa3b03146152 req-2e09a38e-9194-4578-9b3b-37f89c927b2b service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Received unexpected event network-vif-plugged-247b84fb-fa03-4324-adea-ff8f63fd6ec0 for instance with vm_state building and task_state spawning. 
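The PowerOffVM_Task handling above (task-4256935: "Waiting for the task", "progress is 0%", with completion reported a few entries later) follows the usual oslo.vmware invoke-then-poll pattern: the driver submits the SOAP call, receives a Task managed object back, and wait_for_task() polls it until it finishes. A minimal sketch of that pattern, assuming a reachable vCenter; the connection values and vm_ref below are placeholders, not taken from this log:

    from oslo_vmware import api

    # Placeholder credentials; in Nova these come from the [vmware] section of nova.conf.
    session = api.VMwareAPISession(
        'vc1.example.test', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder for the VirtualMachine managed-object reference the driver has
    # already resolved (via the SearchIndex/PropertyCollector calls seen in the log).
    vm_ref = None

    # Submit the call ("Invoking VirtualMachine.PowerOffVM_Task"), then block while
    # oslo.vmware polls the returned task ("progress is 0%" ... completed).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)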
[ 1294.459568] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1294.462694] env[61545]: DEBUG nova.objects.base [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Object Instance<40bade64-b16b-4a33-a9ea-18f80a32c6bc> lazy-loaded attributes: resources,numa_topology {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1294.518155] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.519657] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.533100] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256935, 'name': PowerOffVM_Task, 'duration_secs': 0.215659} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.535277] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1294.535277] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1294.535277] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eeee243b-726d-4696-bb8c-77e2346e2ef9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.616025] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1294.616992] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1294.616992] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] 5e047505-5466-4d89-bdd0-ebe6ac7fef0c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1294.617283] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b93f34f-3eaf-4231-bab8-86330bb55c44 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.627450] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1294.627450] env[61545]: value = "task-4256937" [ 1294.627450] env[61545]: _type = "Task" [ 1294.627450] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.641333] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256937, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.643120] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Successfully created port: 9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1294.703784] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c3e4f6-365a-4a0c-a780-86ba01adb5a1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.712372] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b3c521-c119-4f5c-ba8c-2221d7d1b9af {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.746807] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b668ddbb-6f37-4f93-bcd2-0115b4a283a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.755801] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc014840-0534-4a22-9134-77f1d3b428e2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.775591] env[61545]: DEBUG nova.compute.provider_tree [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.793192] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.793379] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.793491] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1294.968693] env[61545]: INFO nova.virt.block_device [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Booting with volume f670e2ee-2d66-439c-be73-79914f3d6fd5 at /dev/sda [ 1295.026442] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1295.031357] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82d4beb4-7059-410d-ac3c-495a57c1d1f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.045386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb4eba9-4ab0-451d-87a2-41387395f569 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.085763] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-680959fc-3ddd-494e-9070-8c569de0525d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.097876] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5d2d38-53fa-4f96-a042-1dba3546710b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.135791] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d30243-78e2-408e-805e-5dda16437b1f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.146951] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe1a639-bda9-492c-bf2e-2c608521b536 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.149652] env[61545]: DEBUG oslo_vmware.api [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154154} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.150011] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1295.150266] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1295.150541] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1295.150803] env[61545]: INFO nova.compute.manager [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1295.151088] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1295.151739] env[61545]: DEBUG nova.compute.manager [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1295.152285] env[61545]: DEBUG nova.network.neutron [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1295.164733] env[61545]: DEBUG nova.virt.block_device [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating existing volume attachment record: ff351564-1c8b-4596-9aca-394d09ed51ff {{(pid=61545) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1295.279321] env[61545]: DEBUG nova.scheduler.client.report [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1295.357957] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1295.559441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.759283] env[61545]: DEBUG nova.network.neutron [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Updating instance_info_cache with network_info: [{"id": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "address": "fa:16:3e:3a:c6:bb", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap247b84fb-fa", "ovs_interfaceid": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.783884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.332s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.788220] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.020s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.788220] env[61545]: DEBUG nova.objects.instance [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lazy-loading 'resources' on Instance uuid bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.967267] env[61545]: DEBUG nova.network.neutron [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Successfully 
updated port: 67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1296.186217] env[61545]: DEBUG nova.network.neutron [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.262387] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.262744] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Instance network_info: |[{"id": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "address": "fa:16:3e:3a:c6:bb", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap247b84fb-fa", "ovs_interfaceid": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1296.263325] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:c6:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '247b84fb-fa03-4324-adea-ff8f63fd6ec0', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1296.275757] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1296.278410] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1296.278662] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f1ff446-0b98-4c0d-859d-f16d5ee6e872 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.317817] env[61545]: DEBUG oslo_concurrency.lockutils [None req-bb684335-5859-4cef-a510-4117ae60645d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 29.621s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.319508] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.943s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.319508] env[61545]: INFO nova.compute.manager [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Unshelving [ 1296.323782] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1296.323782] env[61545]: value = "task-4256938" [ 1296.323782] env[61545]: _type = "Task" [ 1296.323782] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.332914] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256938, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.340180] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Successfully updated port: 9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1296.472358] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.472672] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.472832] env[61545]: DEBUG nova.network.neutron [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.549422] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad83ff9c-0967-427c-b494-0da7a705a3ab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.558940] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Received event network-changed-247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1296.558940] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Refreshing instance network info cache due to event network-changed-247b84fb-fa03-4324-adea-ff8f63fd6ec0. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1296.558940] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquiring lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.558940] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquired lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.558940] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Refreshing network info cache for port 247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.566106] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc26a6ae-a845-4890-be65-6eee0de928a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.603069] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7991579d-3948-4832-a7e9-75f4f3d440a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.614518] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3286998-5e29-4f5e-a5fa-458e1c06d4e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.633878] env[61545]: DEBUG nova.compute.provider_tree [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1296.688953] env[61545]: INFO nova.compute.manager [-] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Took 1.54 seconds to deallocate network for instance. [ 1296.815165] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1296.816364] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1296.816364] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1296.816364] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1296.816560] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1296.816560] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1296.816677] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1296.816961] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1296.820018] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1296.820018] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies 
{{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1296.820018] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1296.820018] env[61545]: DEBUG nova.virt.hardware [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1296.820018] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c568b498-1977-4fa7-a97a-97057548fce7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.836319] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f26584f-f8e6-4023-b3e3-de1bc965aaa1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.846236] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.846236] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.846236] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.854451] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256938, 'name': CreateVM_Task, 'duration_secs': 0.43204} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.862948] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1296.865051] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.865421] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.865888] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1296.866583] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df705b8c-2985-4a66-a44b-7f37cc3a4460 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.873372] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1296.873372] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e11025-1ddb-e49a-e93a-f0f38f4dfd44" [ 1296.873372] env[61545]: _type = "Task" [ 1296.873372] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.883112] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e11025-1ddb-e49a-e93a-f0f38f4dfd44, 'name': SearchDatastore_Task} progress is 0%. 
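Before the SearchDatastore_Task runs, the request takes a named lock and an "external semaphore" keyed on the image-cache path, so concurrent builds on this host do not race while checking or populating the cached VMDK. A minimal sketch of that serialization pattern with oslo.concurrency follows; the lock name is copied from the log, but the function body and call site are made up for illustration.

```python
# Illustrative use of an oslo.concurrency named lock to serialize work on a
# shared image-cache path; the critical section here is a placeholder.
from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698"


def ensure_cached_image():
    # Only one thread/greenthread in this process enters the block for a given
    # lock name; external=True would additionally serialize across processes
    # via a lock file.
    with lockutils.lock(CACHE_LOCK):
        # ... check whether the cached VMDK already exists, fetch it if missing ...
        pass


ensure_cached_image()
```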
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.030926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.032905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.051100] env[61545]: WARNING nova.network.neutron [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. ignoring it [ 1297.162581] env[61545]: ERROR nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] [req-039fc523-475c-4ad4-aab2-aeb829ee7e55] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-039fc523-475c-4ad4-aab2-aeb829ee7e55"}]} [ 1297.181042] env[61545]: DEBUG nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1297.198587] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.204019] env[61545]: DEBUG nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1297.204019] env[61545]: DEBUG nova.compute.provider_tree [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1297.215729] env[61545]: DEBUG nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1297.242150] env[61545]: DEBUG nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1297.358334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.389020] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e11025-1ddb-e49a-e93a-f0f38f4dfd44, 'name': SearchDatastore_Task, 'duration_secs': 0.014832} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.390205] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.390573] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1297.390939] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.391242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.391567] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.394197] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3eb3148-e6a8-4381-b51a-1a4a93bdfa81 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.406024] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.406024] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1297.406024] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-716ae4f5-b57f-4f74-b297-575ccc016911 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.414868] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1297.414868] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b0ce1-edd0-e5c4-9c97-14a7cf324506" [ 1297.414868] env[61545]: _type = "Task" [ 1297.414868] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.419503] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1297.429021] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Updated VIF entry in instance network info cache for port 247b84fb-fa03-4324-adea-ff8f63fd6ec0. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.429021] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Updating instance_info_cache with network_info: [{"id": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "address": "fa:16:3e:3a:c6:bb", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap247b84fb-fa", "ovs_interfaceid": "247b84fb-fa03-4324-adea-ff8f63fd6ec0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.432152] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': 
session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b0ce1-edd0-e5c4-9c97-14a7cf324506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.442486] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17f4da9-cc66-4f65-99b6-d1de126a05e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.454146] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7be2024-f0e6-4172-b314-8f44f61994bf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.499546] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735c4fd5-0376-4e98-a5d7-20d528a256e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.508572] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1202c0e-7976-4231-a1db-936202393111 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.524861] env[61545]: DEBUG nova.compute.provider_tree [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1297.533925] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Starting instance... 
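The 409 with code placement.concurrent_update above is Placement's optimistic-concurrency check: inventory writes carry the resource provider generation, and if another writer bumped it first the PUT is rejected, after which the report client refreshes inventories, aggregates and traits and retries (the generation later moves from 171 to 172). Below is a hedged sketch of that retry loop against the Placement HTTP API; the base URL, token and retry count are placeholders, only the generation-refresh shape is the point.

```python
# Sketch of generation-based optimistic concurrency against Placement.
# PLACEMENT and the auth header are placeholders; only the retry shape matters.
import requests

PLACEMENT = "http://placement.example:8778"
HEADERS = {"OpenStack-API-Version": "placement 1.26", "X-Auth-Token": "..."}


def set_inventory(provider_uuid, inventories, retries=3):
    for _ in range(retries):
        # Fetch the current provider to learn its generation.
        rp = requests.get(f"{PLACEMENT}/resource_providers/{provider_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Someone else updated the provider first: loop, re-read the generation, retry.
    raise RuntimeError("gave up after repeated placement.concurrent_update conflicts")
```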
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1297.655869] env[61545]: DEBUG nova.network.neutron [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67104b0f-71e5-40be-965b-8376a6c120f8", "address": "fa:16:3e:17:2c:e8", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67104b0f-71", "ovs_interfaceid": "67104b0f-71e5-40be-965b-8376a6c120f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.679457] env[61545]: DEBUG nova.network.neutron [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.927321] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528b0ce1-edd0-e5c4-9c97-14a7cf324506, 'name': SearchDatastore_Task, 'duration_secs': 0.034487} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.927813] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9e6dc2-f32a-459e-8a58-56b798d92f60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.934642] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Releasing lock "refresh_cache-fee61bcc-8870-4f3e-8ab7-ac12ab41a826" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.934926] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Received event network-vif-deleted-cf35a0bc-a18e-493b-b59c-794f2b962cd7 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1297.935161] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1297.935382] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.938948] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.938948] 
env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.938948] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] No waiting events found dispatching network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1297.938948] env[61545]: WARNING nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received unexpected event network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 for instance with vm_state active and task_state None. [ 1297.938948] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1297.938948] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing instance network info cache due to event network-changed-67104b0f-71e5-40be-965b-8376a6c120f8. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1297.938948] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.938948] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1297.938948] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8dcfb-2233-39dc-eb6e-7f401952fa8b" [ 1297.938948] env[61545]: _type = "Task" [ 1297.938948] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.946934] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a8dcfb-2233-39dc-eb6e-7f401952fa8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009934} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.947188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.947447] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fee61bcc-8870-4f3e-8ab7-ac12ab41a826/fee61bcc-8870-4f3e-8ab7-ac12ab41a826.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1297.948216] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-775b3e56-5715-4133-897f-40a2c1404dee {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.955371] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1297.955371] env[61545]: value = "task-4256939" [ 1297.955371] env[61545]: _type = "Task" [ 1297.955371] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.964604] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256939, 'name': CopyVirtualDisk_Task} progress is 0%. 
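Every vCenter call that returns a Task object (SearchDatastore_Task, CreateVM_Task, CopyVirtualDisk_Task, ...) is driven by the same wait_for_task/_poll_task loop seen throughout this log: poll the task's state and progress until it reports success or error. The helper below is a generic, hypothetical version of that pattern, not oslo.vmware's implementation; get_task_info is a stand-in callable.

```python
# Generic poll-until-done helper mirroring the wait_for_task pattern in the log.
# get_task_info is a stand-in for whatever fetches {'state': ..., 'progress': ...}.
import time


def wait_for_task(get_task_info, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task failed: {info.get('error')}")
        # Still queued or running; wait and poll again.
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


# Example with a fake task that finishes on the third poll.
_polls = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 89},
               {"state": "success", "progress": 100}])
print(wait_for_task(lambda: next(_polls), interval=0))
```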
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.058232] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.062591] env[61545]: DEBUG nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1298.063122] env[61545]: DEBUG nova.compute.provider_tree [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 171 to 172 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1298.063372] env[61545]: DEBUG nova.compute.provider_tree [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1298.159380] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.160273] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.160337] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.160606] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.160791] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.165040] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b5e3a4-34b9-416a-85aa-bb3e30972df7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.188184] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.188570] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Instance network_info: |[{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1298.189348] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1298.189596] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.189763] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1298.189939] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.190103] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1298.190259] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1298.190469] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1298.190636] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1298.190802] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1298.190967] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1298.191163] env[61545]: DEBUG nova.virt.hardware [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1298.197697] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfiguring VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1298.198278] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:f4:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9586e46e-23b8-40a2-9703-712bf31c9e96', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1298.205746] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1298.206360] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da440665-c88a-4939-86e5-eecf59c1ae9e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.218869] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1298.219577] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33107260-be46-4270-aea9-2a6d9eff16b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.240160] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1298.240160] env[61545]: value = "task-4256940" [ 1298.240160] env[61545]: _type = "Task" [ 1298.240160] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.241740] env[61545]: DEBUG oslo_vmware.api [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1298.241740] env[61545]: value = "task-4256941" [ 1298.241740] env[61545]: _type = "Task" [ 1298.241740] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.256674] env[61545]: DEBUG oslo_vmware.api [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256941, 'name': ReconfigVM_Task} progress is 0%. 
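The "Instance VIF info" entry above is derived directly from the neutron network_info element logged a few lines earlier: the port id becomes iface_id, the MAC address carries over, and on this NSX-backed network the nsx-logical-switch-id becomes an OpaqueNetwork reference with vif_model vmxnet3. The sketch below shows that mapping, assuming the network_info dict shape visible in this log; the function name is made up.

```python
# Map one neutron network_info entry (shape as logged above) to the VIF info
# dict consumed by the VMware spawn path; field names follow the log output.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],            # e.g. 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


example = {
    "id": "9586e46e-23b8-40a2-9703-712bf31c9e96",
    "address": "fa:16:3e:f7:f4:1b",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124"},
}
print(vif_info_from_network_info(example))
```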
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.256885] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256940, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.467611] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256939, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.570590] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.783s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.573868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.515s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.574977] env[61545]: DEBUG nova.objects.instance [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 5e047505-5466-4d89-bdd0-ebe6ac7fef0c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1298.595896] env[61545]: INFO nova.scheduler.client.report [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Deleted allocations for instance bd259162-c8ea-4408-9b7c-c91b9fbfc0d2 [ 1298.757481] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256940, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.761584] env[61545]: DEBUG oslo_vmware.api [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.968046] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518265} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.968230] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] fee61bcc-8870-4f3e-8ab7-ac12ab41a826/fee61bcc-8870-4f3e-8ab7-ac12ab41a826.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1298.968399] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1298.968675] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9511fe30-1471-4c54-8f68-8bf3e1ca3b1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.973954] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 67104b0f-71e5-40be-965b-8376a6c120f8. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.974707] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67104b0f-71e5-40be-965b-8376a6c120f8", "address": "fa:16:3e:17:2c:e8", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67104b0f-71", "ovs_interfaceid": "67104b0f-71e5-40be-965b-8376a6c120f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.977724] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1298.977724] env[61545]: value = "task-4256942" [ 1298.977724] env[61545]: _type = "Task" [ 1298.977724] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.989345] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.105590] env[61545]: DEBUG oslo_concurrency.lockutils [None req-98979c6f-a7a3-4a95-aca3-37c7764e8a29 tempest-AttachVolumeShelveTestJSON-1204522908 tempest-AttachVolumeShelveTestJSON-1204522908-project-member] Lock "bd259162-c8ea-4408-9b7c-c91b9fbfc0d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.132s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.256055] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256940, 'name': CreateVM_Task, 'duration_secs': 0.603972} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.256055] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1299.256812] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'attachment_id': 'ff351564-1c8b-4596-9aca-394d09ed51ff', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838860', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'name': 'volume-f670e2ee-2d66-439c-be73-79914f3d6fd5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e2eb270-abf0-4734-a49f-ac0b7ee141c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'serial': 'f670e2ee-2d66-439c-be73-79914f3d6fd5'}, 'guest_format': None, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=61545) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1299.257045] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Root volume attach. Driver type: vmdk {{(pid=61545) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1299.257927] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d5d435-f155-44cb-8d42-ef00f81cc850 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.263783] env[61545]: DEBUG oslo_vmware.api [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256941, 'name': ReconfigVM_Task, 'duration_secs': 0.816071} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.264716] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.264957] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfigured VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1299.271554] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5171a29-2371-468d-918c-9a337bcb97f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.279229] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b00d718-dd26-43cc-8639-4f9e61863f9d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.287806] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d3c1ddb5-9d4d-49fb-9ee2-c88b4359b876 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.296567] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1299.296567] env[61545]: value = "task-4256943" [ 1299.296567] env[61545]: _type = "Task" [ 1299.296567] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.307173] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 0%. 
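Instance 4e2eb270 boots from a volume rather than an image: its block device information has an empty 'image' list and a block_device_mapping entry with boot_index 0 and a vmdk connection, which is why the spawn path goes through "Root volume attach. Driver type: vmdk" and a RelocateVM_Task instead of copying an image-cache VMDK. The check below is a small illustration over the block-device dict shape shown in the log; the helper name is made up.

```python
# Illustrative boot-from-volume check over the block_device_info structure
# printed in the log; is_boot_from_volume is a made-up helper name.
def is_boot_from_volume(block_device_info):
    if block_device_info.get("image"):
        return False
    return any(bdm.get("boot_index") == 0
               for bdm in block_device_info.get("block_device_mapping", []))


bdi = {
    "root_device_name": "/dev/sda",
    "image": [],
    "block_device_mapping": [{
        "boot_index": 0,
        "mount_device": "/dev/sda",
        "connection_info": {"driver_volume_type": "vmdk"},
    }],
}
print(is_boot_from_volume(bdi))  # True -> attach the root volume, no image copy
```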
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.479194] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.479443] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Received event network-vif-plugged-9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1299.479671] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.479941] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.480156] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.480391] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] No waiting events found dispatching network-vif-plugged-9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1299.480603] env[61545]: WARNING nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Received unexpected event network-vif-plugged-9586e46e-23b8-40a2-9703-712bf31c9e96 for instance with vm_state building and task_state spawning. [ 1299.480812] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Received event network-changed-9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1299.481085] env[61545]: DEBUG nova.compute.manager [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Refreshing instance network info cache due to event network-changed-9586e46e-23b8-40a2-9703-712bf31c9e96. 
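The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ... for instance with vm_state building" lines reflect the compute manager's external-event bookkeeping: a spawning thread registers interest in a named event, and the handler pops and signals it; events nobody registered for are logged as unexpected. The sketch below is a simplified stand-in for that pattern (the function names and data structures are hypothetical, not nova's implementation).

import threading
from collections import defaultdict

_events = defaultdict(dict)          # instance_uuid -> {event_name: threading.Event}
_lock = threading.Lock()             # mirrors the "<uuid>-events" lock lines above

def prepare_event(instance_uuid, name):
    """Register interest in an external event and return something to wait on."""
    with _lock:
        ev = threading.Event()
        _events[instance_uuid][name] = ev
        return ev

def dispatch_event(instance_uuid, name):
    """Signal a waiter if one exists; otherwise report the event as unexpected."""
    with _lock:
        ev = _events[instance_uuid].pop(name, None)
    if ev is None:
        print("No waiting events found dispatching %s" % name)  # the WARNING case
        return False
    ev.set()                         # wakes the thread blocked on prepare_event(...).wait()
    return True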
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1299.481338] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.481517] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.481724] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Refreshing network info cache for port 9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1299.492680] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077555} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.493648] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1299.494449] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9cd1d3-8863-466a-89f4-fa30c1a592b3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.518418] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] fee61bcc-8870-4f3e-8ab7-ac12ab41a826/fee61bcc-8870-4f3e-8ab7-ac12ab41a826.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.519076] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c65ed366-7e8d-47da-b7c8-a6a8e73a1073 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.545911] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1299.545911] env[61545]: value = "task-4256944" [ 1299.545911] env[61545]: _type = "Task" [ 1299.545911] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.562545] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256944, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.583912] env[61545]: DEBUG oslo_concurrency.lockutils [None req-f7e1e7bf-c668-4a2a-a67c-fb59849914b9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.585475] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.026s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.587494] env[61545]: INFO nova.compute.claims [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.773420] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7921aad3-2e8f-4bc4-935c-267e7bc26549 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.680s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.815073] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 42%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.063833] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256944, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.313027] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 54%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.392541] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updated VIF entry in instance network info cache for port 9586e46e-23b8-40a2-9703-712bf31c9e96. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1300.392931] env[61545]: DEBUG nova.network.neutron [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.567128] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256944, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.787932] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b94396-fbab-4383-b7b2-031ce9b24687 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.803990] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22d7c4d-386a-40f1-a108-517efdde7b4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.848686] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc938a6-5d53-40f8-a906-44447a663fd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.852013] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 67%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.867297] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17e856c-2abf-47fd-a98a-db9eac55a8f1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.890244] env[61545]: DEBUG nova.compute.provider_tree [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1300.898979] env[61545]: DEBUG oslo_concurrency.lockutils [req-ae97248c-2356-46c6-8aff-afb4db7fb157 req-8165b2e4-1542-42a8-9d2f-dd964abc47bb service nova] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.063733] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256944, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.318612] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 81%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.356391] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.356665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.435030] env[61545]: DEBUG nova.scheduler.client.report [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 172 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1301.435326] env[61545]: DEBUG nova.compute.provider_tree [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 172 to 173 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1301.435653] env[61545]: DEBUG nova.compute.provider_tree [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1301.564813] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256944, 'name': ReconfigVM_Task, 'duration_secs': 1.528884} completed successfully. 
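The inventory payloads sent to Placement above carry total, reserved and allocation_ratio per resource class; assuming the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, the effective capacity behind those numbers can be recomputed as in this small sketch (illustrative only).

# Recompute effective capacity from the inventory dict logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 450,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 450}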
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.564951] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Reconfigured VM instance instance-00000075 to attach disk [datastore2] fee61bcc-8870-4f3e-8ab7-ac12ab41a826/fee61bcc-8870-4f3e-8ab7-ac12ab41a826.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1301.565703] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3474c3bc-c8c5-4a3b-9253-14880ce995e0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.579858] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1301.579858] env[61545]: value = "task-4256946" [ 1301.579858] env[61545]: _type = "Task" [ 1301.579858] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.594931] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256946, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.811237] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 95%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.859442] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.859637] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.860735] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bb703b-0361-434d-984b-0c425f0eddd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.885983] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c72d4f1-2810-4f80-9f10-3c75e012571c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.915963] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfiguring VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1301.916332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-082e83aa-1ecc-4ea9-b81f-625c609eb3e8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.937308] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1301.937308] env[61545]: value = "task-4256947" [ 1301.937308] env[61545]: _type = "Task" [ 1301.937308] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.944109] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.944993] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Start building networks asynchronously for instance. 
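The Acquiring / acquired / released lines for the "compute_resources" lock (e.g. held 2.358s after waiting 4.026s) follow oslo.concurrency's named-lock pattern: callers queue on the lock name, then hold it while mutating tracked usage. A minimal usage sketch is below; the tracker methods are hypothetical, only the lockutils calls are real API.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(tracker, instance):
    # hypothetical body: check free resources, then record the claim
    tracker.claim(instance)

# equivalent context-manager form
def update_usage(tracker, instance):
    with lockutils.lock("compute_resources"):
        tracker.update(instance)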
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1301.947818] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.749s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.948083] env[61545]: DEBUG nova.objects.instance [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'resources' on Instance uuid 5e047505-5466-4d89-bdd0-ebe6ac7fef0c {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1301.956914] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.091521] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256946, 'name': Rename_Task, 'duration_secs': 0.202467} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.091762] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1302.092077] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42367371-ccb9-46f4-9318-526f3a56473e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.099868] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1302.099868] env[61545]: value = "task-4256948" [ 1302.099868] env[61545]: _type = "Task" [ 1302.099868] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.109163] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.315109] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.450095] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.451490] env[61545]: DEBUG nova.compute.utils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1302.456861] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1302.457038] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1302.512658] env[61545]: DEBUG nova.policy [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4333754ae4a4e26bab98dfe1853e667', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b64f16b672ff471ba1d48aa2490b9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1302.611944] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256948, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.669863] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f8a2b9-dbe3-49c1-aa5a-70c2ae18436e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.678292] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5afa31f-7fbb-49b2-8088-3ec1c8886f37 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.711749] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3e0a54-c672-4490-b1e2-82e2dca403b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.721350] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9257da6d-d27e-4ec0-bcc0-37da4a15b8b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.738083] env[61545]: DEBUG nova.compute.provider_tree [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.818663] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task} progress is 98%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.952475] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.960626] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1302.996903] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Successfully created port: 10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.114876] env[61545]: DEBUG oslo_vmware.api [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256948, 'name': PowerOnVM_Task, 'duration_secs': 0.665807} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.115212] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1303.115384] env[61545]: INFO nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Took 9.94 seconds to spawn the instance on the hypervisor. [ 1303.115569] env[61545]: DEBUG nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1303.116452] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0434e390-bd4a-4bcc-b0ea-157f4951811a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.240967] env[61545]: DEBUG nova.scheduler.client.report [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.316672] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256943, 'name': RelocateVM_Task, 'duration_secs': 3.724022} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.316974] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Volume attach. 
Driver type: vmdk {{(pid=61545) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1303.317204] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838860', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'name': 'volume-f670e2ee-2d66-439c-be73-79914f3d6fd5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e2eb270-abf0-4734-a49f-ac0b7ee141c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'serial': 'f670e2ee-2d66-439c-be73-79914f3d6fd5'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1303.318084] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c05b05-d366-4e5d-94d3-08b1d3936888 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.335552] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bad8a07-511e-43e8-a7b0-2d5a897f62bc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.359109] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-f670e2ee-2d66-439c-be73-79914f3d6fd5/volume-f670e2ee-2d66-439c-be73-79914f3d6fd5.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.359543] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-511a3f19-de46-4692-9929-f914e65d39c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.381024] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1303.381024] env[61545]: value = "task-4256949" [ 1303.381024] env[61545]: _type = "Task" [ 1303.381024] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.390071] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256949, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.452285] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.636929] env[61545]: INFO nova.compute.manager [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Took 14.76 seconds to build instance. [ 1303.746611] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.749727] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.391s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.750311] env[61545]: DEBUG nova.objects.instance [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'pci_requests' on Instance uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.776212] env[61545]: INFO nova.scheduler.client.report [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocations for instance 5e047505-5466-4d89-bdd0-ebe6ac7fef0c [ 1303.892809] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256949, 'name': ReconfigVM_Task, 'duration_secs': 0.462627} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.893220] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-f670e2ee-2d66-439c-be73-79914f3d6fd5/volume-f670e2ee-2d66-439c-be73-79914f3d6fd5.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.898212] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61189cf2-eb6f-4a69-b0f0-f0a1f759ce0a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.914614] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1303.914614] env[61545]: value = "task-4256950" [ 1303.914614] env[61545]: _type = "Task" [ 1303.914614] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.926617] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256950, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.953711] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.968344] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1304.000147] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1304.000381] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.000547] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1304.000731] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.000878] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1304.001182] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1304.001599] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1304.001910] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1304.002498] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1304.002630] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1304.003150] env[61545]: DEBUG nova.virt.hardware [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1304.004465] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c41e7e-aa9c-471b-be51-a9d742a4c240 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.016887] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6648bf59-6fb8-4f9f-8397-320b361362cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.138342] env[61545]: DEBUG oslo_concurrency.lockutils [None req-0fb54966-11fb-42f4-819b-6b11c4f2844e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.275s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.255838] env[61545]: DEBUG nova.objects.instance [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'numa_topology' on Instance uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1304.285920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-731d1c96-6b04-4a2e-9e7b-7994f12deec9 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "5e047505-5466-4d89-bdd0-ebe6ac7fef0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.802s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.425659] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256950, 'name': ReconfigVM_Task, 'duration_secs': 0.180434} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.425966] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838860', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'name': 'volume-f670e2ee-2d66-439c-be73-79914f3d6fd5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4e2eb270-abf0-4734-a49f-ac0b7ee141c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'serial': 'f670e2ee-2d66-439c-be73-79914f3d6fd5'} {{(pid=61545) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1304.426597] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da6f5df9-515f-4c59-a076-d46c6d5933b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.433639] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1304.433639] env[61545]: value = "task-4256951" [ 1304.433639] env[61545]: _type = "Task" [ 1304.433639] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.443322] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256951, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.453227] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. 
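The "Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines a little earlier amount to enumerating sockets*cores*threads factorizations of the vCPU count under the maximum limits. The following is a simplified sketch of that enumeration, not nova's hardware.py itself.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -> the single topology in the log
print(list(possible_topologies(4)))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...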
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.477901] env[61545]: DEBUG nova.compute.manager [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Received event network-vif-plugged-10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1304.477901] env[61545]: DEBUG oslo_concurrency.lockutils [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.477901] env[61545]: DEBUG oslo_concurrency.lockutils [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.477901] env[61545]: DEBUG oslo_concurrency.lockutils [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.477901] env[61545]: DEBUG nova.compute.manager [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] No waiting events found dispatching network-vif-plugged-10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1304.477901] env[61545]: WARNING nova.compute.manager [req-66b8f2c3-e43b-410d-9d84-7c3985f9effd req-2d93a024-1ea9-4227-a7dc-b7a668db8a20 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Received unexpected event network-vif-plugged-10ec8bc8-486b-4eea-8d00-5b81fe9f1380 for instance with vm_state building and task_state spawning. [ 1304.627810] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Successfully updated port: 10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.760470] env[61545]: INFO nova.compute.claims [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.945485] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256951, 'name': Rename_Task, 'duration_secs': 0.130381} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.949099] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1304.949414] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68e9a0a6-fd4e-4fd0-9e64-ca4cc4df08a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.957825] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.959950] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1304.959950] env[61545]: value = "task-4256952" [ 1304.959950] env[61545]: _type = "Task" [ 1304.959950] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.968452] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256952, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.975192] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.975503] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.975812] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.975912] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.976108] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.978616] env[61545]: INFO nova.compute.manager [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Terminating instance [ 1305.131110] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.131484] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.131841] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.458502] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.472076] env[61545]: DEBUG oslo_vmware.api [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256952, 'name': PowerOnVM_Task, 'duration_secs': 0.490007} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.473044] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1305.473044] env[61545]: INFO nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1305.473229] env[61545]: DEBUG nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.474135] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6b5908-3cad-4bfe-abd9-02cddf25e3d4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.482596] env[61545]: DEBUG nova.compute.manager [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1305.482856] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1305.488283] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e0141d-6b70-4f12-be7e-ae912eac7a4f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.498917] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1305.499249] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b67afb60-db90-4861-8c4c-2da030d96a8c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.510309] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1305.510309] env[61545]: value = "task-4256953" [ 1305.510309] env[61545]: _type = "Task" [ 1305.510309] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.520709] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256953, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.707503] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.938164] env[61545]: DEBUG nova.network.neutron [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.960432] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.978565] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f5b228-fee2-49ad-8c77-2e4bf414e67f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.988485] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64cacb-115e-43f3-abf1-9c6eaf0c2044 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.030469] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d795e9-2c1c-49ae-a217-a8dbed110473 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.034320] env[61545]: INFO nova.compute.manager [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Took 16.97 seconds to build instance. [ 1306.040744] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256953, 'name': PowerOffVM_Task, 'duration_secs': 0.225976} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.044235] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1306.044558] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1306.045012] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de35547-f41c-4a77-9229-3fed3694cd0b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.048331] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b238c3-cb82-4d50-a5cd-3e52f7e6402d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.066658] env[61545]: DEBUG nova.compute.provider_tree [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.421834] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1306.422113] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1306.422436] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] fee61bcc-8870-4f3e-8ab7-ac12ab41a826 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1306.422862] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50cbf2e9-47a1-4f80-bbf8-93a4535715c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.430737] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1306.430737] env[61545]: value = "task-4256955" [ 1306.430737] env[61545]: _type = "Task" [ 1306.430737] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.440978] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.443721] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.444075] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Instance network_info: |[{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1306.444533] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:2c:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10ec8bc8-486b-4eea-8d00-5b81fe9f1380', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.453579] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1306.453940] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.458028] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33904e4f-0247-4c1e-86de-846259549939 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.479810] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.481664] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.481664] env[61545]: value = "task-4256956" [ 1306.481664] env[61545]: _type = "Task" [ 1306.481664] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.491641] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256956, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.519986] env[61545]: DEBUG nova.compute.manager [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Received event network-changed-10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1306.519986] env[61545]: DEBUG nova.compute.manager [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Refreshing instance network info cache due to event network-changed-10ec8bc8-486b-4eea-8d00-5b81fe9f1380. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1306.520180] env[61545]: DEBUG oslo_concurrency.lockutils [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] Acquiring lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.520223] env[61545]: DEBUG oslo_concurrency.lockutils [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] Acquired lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.520516] env[61545]: DEBUG nova.network.neutron [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Refreshing network info cache for port 10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.536665] env[61545]: DEBUG oslo_concurrency.lockutils [None req-084910e2-6e4f-4f61-9687-89d060068b39 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.487s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.570027] env[61545]: DEBUG nova.scheduler.client.report [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.943525] env[61545]: DEBUG oslo_vmware.api [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253739} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.943830] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1306.943977] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1306.944197] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1306.944386] env[61545]: INFO nova.compute.manager [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1306.944635] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1306.944849] env[61545]: DEBUG nova.compute.manager [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1306.944959] env[61545]: DEBUG nova.network.neutron [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1306.964985] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.992636] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256956, 'name': CreateVM_Task, 'duration_secs': 0.356023} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.992824] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.993587] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.993769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.994117] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1306.994391] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfb5f33e-fd68-4bee-866a-55111c24acc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.000789] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1307.000789] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529459ce-aebc-7160-2491-8cedd30d44ff" [ 1307.000789] env[61545]: _type = "Task" [ 1307.000789] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.009852] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529459ce-aebc-7160-2491-8cedd30d44ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.077990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.329s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.080940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.023s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.082608] env[61545]: INFO nova.compute.claims [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.205240] env[61545]: INFO nova.network.neutron [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating port ff62e994-2e58-433b-884f-5b4fa7639d6b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1307.464496] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.512070] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529459ce-aebc-7160-2491-8cedd30d44ff, 'name': SearchDatastore_Task, 'duration_secs': 0.019701} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.512416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.512696] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.512940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.513094] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.513278] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.513557] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e295309a-aa24-4ee5-a591-9e37aae04525 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.524040] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.524264] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.525057] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93cb223d-2e9e-44fe-b459-fdc699436edb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.531498] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1307.531498] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af63a1-0f52-8a8f-def8-ba486fafddb7" [ 1307.531498] env[61545]: _type = "Task" [ 1307.531498] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.540091] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af63a1-0f52-8a8f-def8-ba486fafddb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.832989] env[61545]: DEBUG nova.network.neutron [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updated VIF entry in instance network info cache for port 10ec8bc8-486b-4eea-8d00-5b81fe9f1380. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.833413] env[61545]: DEBUG nova.network.neutron [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.965348] env[61545]: DEBUG oslo_vmware.api [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256947, 'name': ReconfigVM_Task, 'duration_secs': 5.905443} completed 
successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.965803] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.965803] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Reconfigured VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1308.046830] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52af63a1-0f52-8a8f-def8-ba486fafddb7, 'name': SearchDatastore_Task, 'duration_secs': 0.010021} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.047801] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89e29018-861a-4bef-827a-b91901f96081 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.055438] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1308.055438] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8671-0f80-cee4-fc3a-a76e00c3b124" [ 1308.055438] env[61545]: _type = "Task" [ 1308.055438] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.066518] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8671-0f80-cee4-fc3a-a76e00c3b124, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.289358] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e19bdb-b842-4a58-84ba-c56dedf7ba86 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.300451] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44c20bf-be7e-4346-9c70-ae8fbccaace5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.341729] env[61545]: DEBUG nova.network.neutron [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.347070] env[61545]: DEBUG oslo_concurrency.lockutils [req-200ee98c-071f-4dcf-b8e3-d7710bb94833 req-c88aebc0-9492-4a2b-a0b3-f060016b1909 service nova] Releasing lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.347070] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13c44ae-a70d-4bf5-b209-9b8fadd35edd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.356858] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ba554d-6021-4b0b-869e-f88283c60627 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.372930] env[61545]: DEBUG nova.compute.provider_tree [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1308.569623] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eb8671-0f80-cee4-fc3a-a76e00c3b124, 'name': SearchDatastore_Task, 'duration_secs': 0.011342} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.570733] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.570733] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.570733] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b72bcca9-ed78-4702-96d8-1e865a20e85f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.578301] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1308.578301] env[61545]: value = "task-4256957" [ 1308.578301] env[61545]: _type = "Task" [ 1308.578301] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.586448] env[61545]: DEBUG nova.compute.manager [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Received event network-vif-deleted-247b84fb-fa03-4324-adea-ff8f63fd6ec0 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1308.586869] env[61545]: DEBUG nova.compute.manager [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1308.587144] env[61545]: DEBUG nova.compute.manager [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing instance network info cache due to event network-changed-aacdaf9b-9518-4298-a1df-ce1c3931e8e2. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1308.587441] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Acquiring lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.588053] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Acquired lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1308.588316] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Refreshing network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.603504] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.846635] env[61545]: INFO nova.compute.manager [-] [instance: fee61bcc-8870-4f3e-8ab7-ac12ab41a826] Took 1.90 seconds to deallocate network for instance. [ 1308.922733] env[61545]: DEBUG nova.scheduler.client.report [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 173 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1308.923118] env[61545]: DEBUG nova.compute.provider_tree [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 173 to 174 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1308.923389] env[61545]: DEBUG nova.compute.provider_tree [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1309.089385] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256957, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.356709] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.429447] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.430264] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1309.432801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.076s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.433111] env[61545]: DEBUG nova.objects.instance [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid fee61bcc-8870-4f3e-8ab7-ac12ab41a826 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1309.590777] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540294} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.591151] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1309.591451] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1309.591772] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9ce928d-912c-47dc-aa7e-d3faa6f57d11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.599323] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1309.599323] env[61545]: value = "task-4256958" [ 1309.599323] env[61545]: _type = "Task" [ 1309.599323] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.610504] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.610724] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.610950] env[61545]: DEBUG nova.network.neutron [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.612435] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256958, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.887091] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updated VIF entry in instance network info cache for port aacdaf9b-9518-4298-a1df-ce1c3931e8e2. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1309.887585] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [{"id": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "address": "fa:16:3e:3d:9f:82", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaacdaf9b-95", "ovs_interfaceid": "aacdaf9b-9518-4298-a1df-ce1c3931e8e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.936173] env[61545]: DEBUG nova.compute.utils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1309.940727] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1309.940963] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1310.071198] env[61545]: DEBUG nova.policy [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c4aa77f63fb4d66bec0107df1d18e2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d982d4a29b34b4f9fce974e482b386c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1310.110060] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.110385] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.110659] env[61545]: DEBUG nova.network.neutron [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1310.118520] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080695} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.120555] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1310.120965] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1811ecf-383b-4c86-af2c-1d9b2b66fffa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.125583] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc45fe3-8924-4461-bda3-6f1dc65fa1b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.155646] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1310.157632] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7779650d-8e26-4981-b435-812ece583e6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.161686] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2cbdd17-e381-47f6-bb7b-1cc57cf821dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.215248] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b60ddcf-86e1-40d5-9c12-a095dea16dc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.218579] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1310.218579] env[61545]: value = "task-4256959" [ 1310.218579] env[61545]: _type = "Task" [ 1310.218579] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.229300] env[61545]: DEBUG nova.compute.manager [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1310.234019] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1362da49-4591-41d6-ab51-7e7ce54fe622 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.242253] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256959, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.253612] env[61545]: DEBUG nova.compute.provider_tree [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1310.392661] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Releasing lock "refresh_cache-d0f42893-3332-4027-93df-bb46e3350485" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.392661] env[61545]: DEBUG nova.compute.manager [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Received event network-changed-9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1310.392661] env[61545]: DEBUG nova.compute.manager [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Refreshing instance network info cache due to event network-changed-9586e46e-23b8-40a2-9703-712bf31c9e96. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1310.392661] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.392835] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.393039] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Refreshing network info cache for port 9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.441954] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1310.524588] env[61545]: INFO nova.network.neutron [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Port 67104b0f-71e5-40be-965b-8376a6c120f8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1310.524957] env[61545]: DEBUG nova.network.neutron [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.606207] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Successfully created port: 98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1310.623252] env[61545]: DEBUG nova.compute.manager [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1310.623484] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.623940] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.623940] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.624088] env[61545]: DEBUG nova.compute.manager [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] No waiting events found dispatching network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1310.624257] env[61545]: WARNING nova.compute.manager [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received unexpected event network-vif-plugged-ff62e994-2e58-433b-884f-5b4fa7639d6b for instance with vm_state shelved_offloaded and task_state spawning. [ 1310.624418] env[61545]: DEBUG nova.compute.manager [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1310.624583] env[61545]: DEBUG nova.compute.manager [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing instance network info cache due to event network-changed-ff62e994-2e58-433b-884f-5b4fa7639d6b. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1310.624793] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Acquiring lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.731826] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256959, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.762303] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.779091] env[61545]: ERROR nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [req-c51bb299-2310-4910-85d2-b3030db6b953] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c51bb299-2310-4910-85d2-b3030db6b953"}]} [ 1310.787462] env[61545]: DEBUG nova.compute.manager [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1310.787666] env[61545]: DEBUG nova.compute.manager [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing instance network info cache due to event network-changed-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1310.787859] env[61545]: DEBUG oslo_concurrency.lockutils [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] Acquiring lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.800527] env[61545]: DEBUG nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1310.821344] env[61545]: DEBUG nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1310.821732] env[61545]: DEBUG nova.compute.provider_tree [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1310.834671] env[61545]: DEBUG nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1310.855658] env[61545]: DEBUG nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1311.029730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.033282] env[61545]: DEBUG oslo_concurrency.lockutils [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] Acquired lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.033523] env[61545]: DEBUG nova.network.neutron [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Refreshing network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.081897] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331d1144-d686-4c9f-ad4d-3abeb636996f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.090646] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87656a0d-80f5-4efa-880c-c88c72207911 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.127475] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd7266c-f669-44fc-a91e-8bc340cf8af8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.138744] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efbb73c-788b-464c-b3f2-42dd2e04941c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.154887] env[61545]: DEBUG nova.compute.provider_tree [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1311.183357] env[61545]: DEBUG nova.network.neutron [None 
req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.233638] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256959, 'name': ReconfigVM_Task, 'duration_secs': 0.804529} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.234046] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfigured VM instance instance-00000077 to attach disk [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.237958] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c48ac962-7101-4394-871c-58fbfe8dab70 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.242938] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1311.242938] env[61545]: value = "task-4256960" [ 1311.242938] env[61545]: _type = "Task" [ 1311.242938] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.251889] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256960, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.412188] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updated VIF entry in instance network info cache for port 9586e46e-23b8-40a2-9703-712bf31c9e96. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.412571] env[61545]: DEBUG nova.network.neutron [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.452990] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1311.482156] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1311.482423] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.482582] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1311.482758] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.482905] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1311.483066] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1311.483285] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1311.483487] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 
tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1311.483674] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1311.483848] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1311.484031] env[61545]: DEBUG nova.virt.hardware [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1311.484909] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85c7f8e-507b-4781-9f3f-c5f3191db8dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.493918] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5526676-3159-4f41-84c3-f2e4bca2b9a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.534310] env[61545]: DEBUG oslo_concurrency.lockutils [None req-43d016ed-de91-40d3-904c-60a9e11bd486 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-67104b0f-71e5-40be-965b-8376a6c120f8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.177s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.638538] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.638906] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.639181] env[61545]: DEBUG nova.objects.instance [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 
tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'flavor' on Instance uuid c677a1fe-4c95-4142-8f1a-fcc8a21389c6 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.686728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.688938] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Acquired lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.689151] env[61545]: DEBUG nova.network.neutron [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Refreshing network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.703021] env[61545]: DEBUG nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1311.703021] env[61545]: DEBUG nova.compute.provider_tree [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 176 to 177 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1311.703021] env[61545]: DEBUG nova.compute.provider_tree [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1311.716282] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0cf2539adcfb9fe6e16b7ef68627bfb3',container_format='bare',created_at=2025-06-03T12:57:02Z,direct_url=,disk_format='vmdk',id=86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1702626430-shelved',owner='45efa52890714522b3058b7144b42a89',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-06-03T12:57:21Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1311.716529] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.716688] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1311.716873] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.717025] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1311.717178] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1311.717384] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1311.717543] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1311.717709] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1311.717869] env[61545]: 
DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1311.718057] env[61545]: DEBUG nova.virt.hardware [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1311.719238] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b30e9c-e025-4f21-87f5-e436eb2d9742 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.728474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6e27c9-a372-45db-8195-41709f7dc555 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.743442] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:25:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff62e994-2e58-433b-884f-5b4fa7639d6b', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1311.750953] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1311.752094] env[61545]: DEBUG nova.network.neutron [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updated VIF entry in instance network info cache for port 3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.752454] env[61545]: DEBUG nova.network.neutron [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [{"id": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "address": "fa:16:3e:f4:ae:80", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7d71d2-68", "ovs_interfaceid": "3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.753779] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1311.757580] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-728be2e9-e057-4bd6-a10f-eccdfe383720 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.778672] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256960, 'name': Rename_Task, 'duration_secs': 0.351211} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.780091] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.780345] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1311.780345] env[61545]: value = "task-4256961" [ 1311.780345] env[61545]: _type = "Task" [ 1311.780345] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.780522] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32bcdb15-789d-4337-b876-e58eaa6c8fa6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.791183] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256961, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.792701] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1311.792701] env[61545]: value = "task-4256962" [ 1311.792701] env[61545]: _type = "Task" [ 1311.792701] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.803473] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256962, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.915530] env[61545]: DEBUG oslo_concurrency.lockutils [req-5db1f866-ad6f-4775-bb8f-8f1b5ed916d8 req-d37bced4-1707-46da-8558-e82b147cc79e service nova] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.209048] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.776s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.212585] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.450s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.241928] env[61545]: INFO nova.scheduler.client.report [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance fee61bcc-8870-4f3e-8ab7-ac12ab41a826 [ 1312.256183] env[61545]: DEBUG oslo_concurrency.lockutils [req-8faa9a73-5ac7-4823-836f-f380ff7c2a16 req-8159acb4-98d4-4edd-9f80-8aa0c184726e service nova] Releasing lock "refresh_cache-e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.293803] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256961, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.306815] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256962, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.329729] env[61545]: DEBUG nova.objects.instance [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'pci_requests' on Instance uuid c677a1fe-4c95-4142-8f1a-fcc8a21389c6 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1312.656950] env[61545]: DEBUG nova.network.neutron [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updated VIF entry in instance network info cache for port ff62e994-2e58-433b-884f-5b4fa7639d6b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1312.658198] env[61545]: DEBUG nova.network.neutron [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [{"id": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "address": "fa:16:3e:09:25:39", "network": {"id": "811f5ac2-8b17-4fe3-aa0b-c3e388b38e24", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1715918041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45efa52890714522b3058b7144b42a89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff62e994-2e", "ovs_interfaceid": "ff62e994-2e58-433b-884f-5b4fa7639d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.718407] env[61545]: INFO nova.compute.claims [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.753423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b5ea4734-63b2-4fb9-b15c-30d07f80ce3b tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "fee61bcc-8870-4f3e-8ab7-ac12ab41a826" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.778s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.795347] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256961, 'name': CreateVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.804167] env[61545]: DEBUG oslo_vmware.api [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256962, 'name': PowerOnVM_Task, 'duration_secs': 0.516693} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.804438] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.804655] env[61545]: INFO nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Took 8.84 seconds to spawn the instance on the hypervisor. [ 1312.804858] env[61545]: DEBUG nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.805955] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38592dc6-9a2c-4cbb-bf32-99bf7963728d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.817990] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Successfully updated port: 98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1312.832933] env[61545]: DEBUG nova.objects.base [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1312.833201] env[61545]: DEBUG nova.network.neutron [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1312.937691] env[61545]: DEBUG nova.policy [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9563fb3475d740a0b6b2e443209501f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'845539fcaa744e59b6eb695b8a257de4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1312.949244] env[61545]: DEBUG nova.compute.manager [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Received event network-vif-plugged-98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1312.949966] env[61545]: DEBUG oslo_concurrency.lockutils [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] Acquiring lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.950845] env[61545]: DEBUG oslo_concurrency.lockutils [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.951053] env[61545]: DEBUG oslo_concurrency.lockutils [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.951236] env[61545]: DEBUG nova.compute.manager [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] No waiting events found dispatching network-vif-plugged-98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1312.951410] env[61545]: WARNING nova.compute.manager [req-f92ad98a-23ba-487e-bda0-30d2dd95a4f3 req-12458b50-b0d5-471e-80f5-6fd15f315395 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Received unexpected event network-vif-plugged-98656068-e5a8-462f-bf47-905ed2b92aae for instance with vm_state building and task_state spawning. [ 1312.969031] env[61545]: DEBUG nova.compute.manager [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1312.969148] env[61545]: DEBUG nova.compute.manager [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing instance network info cache due to event network-changed-735d1ad5-27dd-48fe-9d11-abc15c2f647b. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1312.969360] env[61545]: DEBUG oslo_concurrency.lockutils [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.969567] env[61545]: DEBUG oslo_concurrency.lockutils [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.969742] env[61545]: DEBUG nova.network.neutron [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.160775] env[61545]: DEBUG oslo_concurrency.lockutils [req-8fcac565-cf5f-4260-adba-61e738d56260 req-c0dda66c-3088-4cdf-ba54-794d00a7fbeb service nova] Releasing lock "refresh_cache-40bade64-b16b-4a33-a9ea-18f80a32c6bc" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.227452] env[61545]: INFO nova.compute.resource_tracker [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating resource usage from migration ddb5e038-2bfa-4a2d-9185-4e47cd743140 [ 1313.300335] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256961, 'name': CreateVM_Task, 'duration_secs': 1.346669} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.300504] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1313.301199] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.301369] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.301793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1313.302157] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b9c6e95-2758-4f8c-b523-d853de7151de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.309998] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1313.309998] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d877b-df2f-3b05-3028-3e46984c6b60" [ 1313.309998] env[61545]: _type = "Task" [ 1313.309998] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.328202] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.328365] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquired lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.328518] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1313.329867] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]523d877b-df2f-3b05-3028-3e46984c6b60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.334395] env[61545]: INFO nova.compute.manager [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Took 17.80 seconds to build instance. 
[ 1313.402993] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d35a67f-d9b6-4095-a28f-2bc9a77fa87d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.412087] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78e134a-02d9-45db-bc84-8d52ef576113 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.444186] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2d34f7-c410-4876-a22d-35172abc72f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.452673] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202dbe0b-4cf3-49f8-b761-a5c649801127 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.467742] env[61545]: DEBUG nova.compute.provider_tree [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.543236] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "22437c4c-0e0b-4191-b079-3f6b7031656c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.543499] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.543790] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.544066] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.544185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.546386] env[61545]: INFO nova.compute.manager [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Terminating instance [ 1313.789507] env[61545]: DEBUG nova.network.neutron [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updated VIF entry in instance network info cache for port 735d1ad5-27dd-48fe-9d11-abc15c2f647b. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.789893] env[61545]: DEBUG nova.network.neutron [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.823105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.823370] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Processing image 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1313.823605] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.823752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquired lock "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.823932] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1313.824221] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-847b5407-0eb9-44a2-907f-7d778f8521e9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.834424] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1313.834612] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1313.835372] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-495d0e9a-fb9a-4dcb-aa16-d0720c6929e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.838023] env[61545]: DEBUG oslo_concurrency.lockutils [None req-82a721e8-2224-47a6-8a2d-70a60b918c32 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.319s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.842561] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1313.842561] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5247bc39-1e1b-5522-5a06-4a4d49f1c925" [ 1313.842561] env[61545]: _type = "Task" [ 1313.842561] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.854161] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5247bc39-1e1b-5522-5a06-4a4d49f1c925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.881228] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1313.974125] env[61545]: DEBUG nova.scheduler.client.report [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1314.052207] env[61545]: DEBUG nova.compute.manager [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1314.052207] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.053033] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6290e0f-4178-41e9-aab3-8eedf99f8928 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.060634] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.060910] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5519da9-db11-412c-a9ac-66dd89495a43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.068405] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1314.068405] env[61545]: value = "task-4256963" [ 1314.068405] env[61545]: _type = "Task" [ 1314.068405] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.077020] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256963, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.079866] env[61545]: DEBUG nova.network.neutron [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Updating instance_info_cache with network_info: [{"id": "98656068-e5a8-462f-bf47-905ed2b92aae", "address": "fa:16:3e:6b:33:22", "network": {"id": "66137b6b-3909-4259-bdfc-615ead27dbd8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1694487971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d982d4a29b34b4f9fce974e482b386c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98656068-e5", "ovs_interfaceid": "98656068-e5a8-462f-bf47-905ed2b92aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.187261] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.293310] env[61545]: DEBUG oslo_concurrency.lockutils [req-1252be5d-f1e5-4477-a41a-c6e4f730844d req-f3625cc7-bb5b-4b7f-b176-9ca29f0b8ec7 service nova] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.357292] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Preparing fetch location {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1314.357560] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Fetch image to [datastore2] OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5/OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5.vmdk {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1314.357751] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Downloading stream optimized image 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c to [datastore2] 
OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5/OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5.vmdk on the data store datastore2 as vApp {{(pid=61545) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1314.357920] env[61545]: DEBUG nova.virt.vmwareapi.images [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Downloading image file data 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c to the ESX as VM named 'OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5' {{(pid=61545) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1314.457467] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1314.457467] env[61545]: value = "resgroup-9" [ 1314.457467] env[61545]: _type = "ResourcePool" [ 1314.457467] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1314.457790] env[61545]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f7ddcdbc-9262-4ddc-9984-87a85b54cf3b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.476560] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.264s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.476844] env[61545]: INFO nova.compute.manager [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Migrating [ 1314.494305] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease: (returnval){ [ 1314.494305] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1314.494305] env[61545]: _type = "HttpNfcLease" [ 1314.494305] env[61545]: } obtained for vApp import into resource pool (val){ [ 1314.494305] env[61545]: value = "resgroup-9" [ 1314.494305] env[61545]: _type = "ResourcePool" [ 1314.494305] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1314.494305] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the lease: (returnval){ [ 1314.494305] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1314.494305] env[61545]: _type = "HttpNfcLease" [ 1314.494305] env[61545]: } to be ready. 
{{(pid=61545) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1314.506015] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1314.506015] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1314.506015] env[61545]: _type = "HttpNfcLease" [ 1314.506015] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1314.580353] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256963, 'name': PowerOffVM_Task, 'duration_secs': 0.267572} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.580712] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1314.580908] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.581223] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eea466a2-e5ec-44f2-8045-72adbd8cd606 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.583481] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Releasing lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.583911] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Instance network_info: |[{"id": "98656068-e5a8-462f-bf47-905ed2b92aae", "address": "fa:16:3e:6b:33:22", "network": {"id": "66137b6b-3909-4259-bdfc-615ead27dbd8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1694487971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d982d4a29b34b4f9fce974e482b386c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98656068-e5", "ovs_interfaceid": 
"98656068-e5a8-462f-bf47-905ed2b92aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1314.584722] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:33:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cb478a6-872c-4a90-a8db-526b374e82ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98656068-e5a8-462f-bf47-905ed2b92aae', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1314.593473] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Creating folder: Project (6d982d4a29b34b4f9fce974e482b386c). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.594401] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-afe5b233-2899-435e-8ef1-0601c5a52bb0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.606547] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Created folder: Project (6d982d4a29b34b4f9fce974e482b386c) in parent group-v838542. [ 1314.606776] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Creating folder: Instances. Parent ref: group-v838868. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1314.607067] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-530afea9-a7b3-4eef-95c6-442337abb8ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.613872] env[61545]: DEBUG nova.network.neutron [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Successfully updated port: 67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.618400] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Created folder: Instances in parent group-v838868. [ 1314.618580] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1314.618806] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1314.619071] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1499c9df-648d-4bc7-9aa0-3670ab60ed2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.641699] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1314.641699] env[61545]: value = "task-4256968" [ 1314.641699] env[61545]: _type = "Task" [ 1314.641699] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.650675] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256968, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.807977] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1314.808176] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1314.808368] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] 22437c4c-0e0b-4191-b079-3f6b7031656c {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1314.808660] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dc3ac56-c7b1-4a93-9638-711e91a02f51 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.816596] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1314.816596] env[61545]: value = "task-4256969" [ 1314.816596] env[61545]: _type = "Task" [ 1314.816596] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.825614] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.995757] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.996234] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.996499] env[61545]: DEBUG nova.network.neutron [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1315.009257] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1315.009257] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1315.009257] env[61545]: _type = "HttpNfcLease" [ 1315.009257] env[61545]: } is initializing. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1315.117337] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.117587] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.117916] env[61545]: DEBUG nova.network.neutron [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1315.152119] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Received event network-changed-98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1315.152365] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Refreshing instance network info cache due to event network-changed-98656068-e5a8-462f-bf47-905ed2b92aae. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1315.152687] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Acquiring lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.152752] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Acquired lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.152915] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Refreshing network info cache for port 98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1315.158537] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4256968, 'name': CreateVM_Task, 'duration_secs': 0.402625} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.158966] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1315.159996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.159996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.160231] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1315.160784] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1b85715-b022-4e39-983a-b814632a3b3e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.166921] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1315.166921] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52036567-649f-c88e-65be-ac565f6338ca" [ 1315.166921] env[61545]: _type = 
"Task" [ 1315.166921] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.177268] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52036567-649f-c88e-65be-ac565f6338ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.187241] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.327741] env[61545]: DEBUG oslo_vmware.api [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4256969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182432} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.328213] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.328213] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1315.328358] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.328565] env[61545]: INFO nova.compute.manager [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1315.328828] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1315.329040] env[61545]: DEBUG nova.compute.manager [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1315.329141] env[61545]: DEBUG nova.network.neutron [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1315.509245] env[61545]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1315.509245] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1315.509245] env[61545]: _type = "HttpNfcLease" [ 1315.509245] env[61545]: } is ready. {{(pid=61545) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1315.509584] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1315.509584] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f93d62-eb34-98e3-7fef-f0b03e4810e8" [ 1315.509584] env[61545]: _type = "HttpNfcLease" [ 1315.509584] env[61545]: }. {{(pid=61545) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1315.510601] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbf9202-928e-4571-a269-4645c720a67f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.518706] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk from lease info. {{(pid=61545) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1315.518887] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk. {{(pid=61545) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1315.581296] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-21c38f23-57b3-4c87-85a8-ba93b5eea3b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.677544] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52036567-649f-c88e-65be-ac565f6338ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010841} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.677834] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.678082] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1315.678323] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.678470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.678653] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1315.678918] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94c88719-adfd-4f27-9766-5d00b915dbc0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.687965] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1315.688193] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1315.688957] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00a46966-d132-4686-a131-064cc4132a02 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.694720] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1315.694720] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52351234-dd92-9e3d-daa2-9fad9a9f6cf8" [ 1315.694720] env[61545]: _type = "Task" [ 1315.694720] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.695544] env[61545]: WARNING nova.network.neutron [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] 980f8e73-b8ce-492a-90f5-f43e01dc44cd already exists in list: networks containing: ['980f8e73-b8ce-492a-90f5-f43e01dc44cd']. ignoring it [ 1315.707034] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52351234-dd92-9e3d-daa2-9fad9a9f6cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.009864} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.707823] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a0230b-60f1-4cf3-ae5c-0d22a8b449c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.713654] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1315.713654] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5266c671-50b1-ec27-bd11-bd1e887775d3" [ 1315.713654] env[61545]: _type = "Task" [ 1315.713654] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.723827] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5266c671-50b1-ec27-bd11-bd1e887775d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.955662] env[61545]: DEBUG nova.compute.manager [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1316.112590] env[61545]: DEBUG nova.network.neutron [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.127912] env[61545]: DEBUG nova.network.neutron [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.186883] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.187176] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.224995] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5266c671-50b1-ec27-bd11-bd1e887775d3, 'name': SearchDatastore_Task, 'duration_secs': 0.010206} completed successfully. 
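The recurring "Task: {...} progress is N%" and "completed successfully" entries are produced by oslo.vmware's task poller, which re-reads task state until the task succeeds or fails. A generic version of that loop, with a hypothetical get_task_info callable in place of the PropertyCollector round-trips:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(task, get_task_info, interval=0.5):
        """Poll a vCenter task until it finishes.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'queued', 'success', 'error'), .progress and .error.
        """
        while True:
            info = get_task_info(task)
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            # Still queued or running: report progress and poll again,
            # mirroring the "progress is N%" entries above.
            print("Task %s progress is %s%%" % (task, info.progress or 0))
            time.sleep(interval)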
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.225381] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.225633] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7e2f8c49-4a0c-4152-9d01-34219bba83f3/7e2f8c49-4a0c-4152-9d01-34219bba83f3.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1316.225936] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6d60033-47dc-40fb-97a8-3f9f854423b2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.234558] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1316.234558] env[61545]: value = "task-4256970" [ 1316.234558] env[61545]: _type = "Task" [ 1316.234558] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.239498] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Updated VIF entry in instance network info cache for port 98656068-e5a8-462f-bf47-905ed2b92aae. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1316.239861] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Updating instance_info_cache with network_info: [{"id": "98656068-e5a8-462f-bf47-905ed2b92aae", "address": "fa:16:3e:6b:33:22", "network": {"id": "66137b6b-3909-4259-bdfc-615ead27dbd8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1694487971-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d982d4a29b34b4f9fce974e482b386c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98656068-e5", "ovs_interfaceid": "98656068-e5a8-462f-bf47-905ed2b92aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.249704] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256970, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.394996] env[61545]: DEBUG nova.network.neutron [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67104b0f-71e5-40be-965b-8376a6c120f8", "address": "fa:16:3e:17:2c:e8", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67104b0f-71", "ovs_interfaceid": "67104b0f-71e5-40be-965b-8376a6c120f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.487024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.487024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.617033] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.632115] env[61545]: INFO nova.compute.manager [-] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Took 1.30 seconds to deallocate network for instance. [ 1316.691332] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.746696] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Releasing lock "refresh_cache-7e2f8c49-4a0c-4152-9d01-34219bba83f3" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.746993] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1316.747224] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.747444] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.747609] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.747786] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] No waiting events found dispatching network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1316.747957] env[61545]: WARNING nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 
req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received unexpected event network-vif-plugged-67104b0f-71e5-40be-965b-8376a6c120f8 for instance with vm_state active and task_state None. [ 1316.748143] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-changed-67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1316.748305] env[61545]: DEBUG nova.compute.manager [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing instance network info cache due to event network-changed-67104b0f-71e5-40be-965b-8376a6c120f8. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1316.748482] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.748766] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256970, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.898218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.898989] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.899237] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.899606] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.899812] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Refreshing network info cache for port 67104b0f-71e5-40be-965b-8376a6c120f8 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1316.901722] env[61545]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e5a01e-3b84-48af-b690-239467d72a0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.922145] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1316.922400] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.922581] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1316.922814] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.922962] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1316.923123] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1316.923336] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1316.923494] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1316.923658] env[61545]: DEBUG nova.virt.hardware [None 
req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1316.923820] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1316.923991] env[61545]: DEBUG nova.virt.hardware [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1316.930699] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfiguring VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1316.936064] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f58959f2-e116-4b81-bf90-6ae01645016f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.957255] env[61545]: DEBUG oslo_vmware.api [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1316.957255] env[61545]: value = "task-4256971" [ 1316.957255] env[61545]: _type = "Task" [ 1316.957255] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.969698] env[61545]: DEBUG oslo_vmware.api [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256971, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.991639] env[61545]: INFO nova.compute.claims [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1317.043246] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Completed reading data from the image iterator. {{(pid=61545) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1317.043504] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk. 
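The nova.virt.hardware entries above derive a guest CPU topology for a 1-vCPU flavor: neither the flavor nor the image sets limits or preferences (0:0:0), so the 65536-per-dimension defaults apply and the only combination whose product equals 1 vCPU is sockets=1, cores=1, threads=1. A small illustration of that filtering step (not Nova's actual implementation):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) combinations whose product equals
        the vCPU count and which respect the per-dimension limits."""
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
                yield (s, c, t)

    # For the 1-vCPU flavor above this yields exactly [(1, 1, 1)],
    # matching "Got 1 possible topologies" in the log.
    print(list(possible_topologies(1)))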
{{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1317.044589] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951d783d-d900-4516-b2e1-3f150f21e597 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.052719] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk is in state: ready. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1317.052912] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk. {{(pid=61545) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1317.053186] env[61545]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6944ecb0-d53f-4a4d-af72-7d5bff06f8b5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.139359] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.247031] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580581} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.247511] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7e2f8c49-4a0c-4152-9d01-34219bba83f3/7e2f8c49-4a0c-4152-9d01-34219bba83f3.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.247741] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.248050] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01dc7b75-c44c-40da-abda-0fb687d66052 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.257165] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1317.257165] env[61545]: value = "task-4256972" [ 1317.257165] env[61545]: _type = "Task" [ 1317.257165] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.259779] env[61545]: DEBUG nova.compute.manager [req-d0a9f4b6-2a5c-4605-9980-4fcd369adad3 req-00eb465d-7c8c-4ca2-9581-b9ea59f0d564 service nova] [instance: 22437c4c-0e0b-4191-b079-3f6b7031656c] Received event network-vif-deleted-a5c32d4d-c669-44cf-891b-60956afffddd {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1317.269618] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.380943] env[61545]: DEBUG oslo_vmware.rw_handles [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521e2439-e588-7e48-1d87-0944119a63eb/disk-0.vmdk. 
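"Extending root virtual disk to 1048576" is expressed in KB: the base image copied out of the cache is smaller than the flavor's root disk, so it is grown to 1,048,576 KB (1 GiB, consistent with a root_gb=1 flavor) before the ReconfigVM_Task attaches it to the instance. A one-line check of that conversion, assuming the usual GB-to-KB scaling:

    root_gb = 1                                # assumed 1 GB root-disk flavor
    requested_size_kb = root_gb * 1024 * 1024  # GiB expressed in KB
    assert requested_size_kb == 1048576        # the value in the ExtendVirtualDisk entry above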
{{(pid=61545) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1317.381193] env[61545]: INFO nova.virt.vmwareapi.images [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Downloaded image file data 86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c [ 1317.382127] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7a40c4-a928-4c29-a222-12feead415fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.398442] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9146ac7-4d79-44e8-963a-5bd13375625e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.467621] env[61545]: INFO nova.virt.vmwareapi.images [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] The imported VM was unregistered [ 1317.471019] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Caching image {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1317.471420] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Creating directory with path [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.472476] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f536c2d4-9e83-42d9-9b1b-0d36f0f8a5ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.481727] env[61545]: DEBUG oslo_vmware.api [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.501238] env[61545]: INFO nova.compute.resource_tracker [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating resource usage from migration 48f4f990-0e12-40ad-a925-28b1254e1dc7 [ 1317.518534] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Created directory with path [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.518890] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5/OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5.vmdk to [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk. {{(pid=61545) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1317.522811] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-376ec09d-af5d-46aa-87ab-4c7a8d746756 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.538020] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1317.538020] env[61545]: value = "task-4256974" [ 1317.538020] env[61545]: _type = "Task" [ 1317.538020] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.550773] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 0%. 
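After the imported VM is unregistered, its disk is moved out of the temporary OSTACK_IMG_* folder into devstack-image-cache_base/<image-id>/<image-id>.vmdk, so later boots of the same image can copy from the cache instead of re-importing it. A compact sketch of that cache-then-copy sequence, with hypothetical cached_exists/move/copy callables standing in for the datastore search, MoveVirtualDisk_Task and CopyVirtualDisk_Task calls:

    def cache_and_copy(image_id, imported_vmdk, instance_uuid,
                       cached_exists, move, copy):
        """Cache an imported disk, then copy it into the instance folder."""
        cached = ("[datastore2] devstack-image-cache_base/%s/%s.vmdk"
                  % (image_id, image_id))
        if not cached_exists(cached):
            move(imported_vmdk, cached)                   # MoveVirtualDisk_Task
        target = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)
        copy(cached, target)                              # CopyVirtualDisk_Task
        return target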
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.701286] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cca1487-503d-402a-9788-895853f9a377 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.709812] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d71f7d8-6881-4066-a7b5-b2d89c1faf0d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.745797] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42556433-6da6-4ffa-b711-26be5d771e17 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.754616] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddec39e9-0d7a-4beb-aac9-20957b569a8b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.777426] env[61545]: DEBUG nova.compute.provider_tree [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.779820] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121113} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.780134] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.780971] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fa3905-6e14-4206-9563-121f8ce1fddf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.807794] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 7e2f8c49-4a0c-4152-9d01-34219bba83f3/7e2f8c49-4a0c-4152-9d01-34219bba83f3.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.808597] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f44c28e-19fc-41c5-ac42-d9b48247fc10 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.833863] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1317.833863] env[61545]: value = "task-4256975" [ 1317.833863] env[61545]: _type = "Task" [ 1317.833863] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.844688] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256975, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.973804] env[61545]: DEBUG oslo_vmware.api [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256971, 'name': ReconfigVM_Task, 'duration_secs': 0.929246} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.974396] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.974624] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfigured VM to attach interface {{(pid=61545) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1317.981076] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updated VIF entry in instance network info cache for port 67104b0f-71e5-40be-965b-8376a6c120f8. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1317.981076] env[61545]: DEBUG nova.network.neutron [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67104b0f-71e5-40be-965b-8376a6c120f8", "address": "fa:16:3e:17:2c:e8", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67104b0f-71", "ovs_interfaceid": "67104b0f-71e5-40be-965b-8376a6c120f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.049868] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.141343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20384b6-2bcf-4daf-96c6-51d7e4d5efa0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.163651] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1318.280847] env[61545]: DEBUG nova.scheduler.client.report [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1318.346126] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256975, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.479736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7ebab330-589d-4e13-95e7-9ff0a8715059 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.841s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.483875] env[61545]: DEBUG oslo_concurrency.lockutils [req-78d8e5d0-6ec0-475d-99d1-7241cd914ee6 req-c68afd53-d0be-4e5f-b151-5813dc54ab55 service nova] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.553153] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.670073] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.670297] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6d25a0c-c8dc-4549-8f5d-cc75f8bd4eab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.682325] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1318.682325] env[61545]: value = "task-4256976" [ 1318.682325] env[61545]: _type = "Task" [ 1318.682325] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.698131] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256976, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.786165] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.300s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.786356] env[61545]: INFO nova.compute.manager [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Migrating [ 1318.793658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.102s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.793894] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.794093] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1318.794470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.655s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.794723] env[61545]: DEBUG nova.objects.instance [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid 22437c4c-0e0b-4191-b079-3f6b7031656c {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.799426] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b032210b-39f2-405c-aa7a-bf3b2c6e898f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.817868] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0f8e62-d5df-42a1-914f-8851b4aee8c5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.841145] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1949f74-61a6-4c31-958c-4e7e6077d560 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.852649] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 
tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256975, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.855402] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914bff40-dc86-438d-98f5-3c8b7a81eeaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.899422] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179530MB free_disk=245GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1318.899658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.050974] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.195194] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256976, 'name': PowerOffVM_Task, 'duration_secs': 0.456135} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.195473] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.195675] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1319.314072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.314072] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.314072] env[61545]: DEBUG nova.network.neutron [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.348638] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256975, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.505211] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0df0e8-3801-4a1b-bc8d-1655476b5eaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.514539] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7166e4f9-ef58-4eeb-a301-4b1c4c90fab2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.551198] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0127e7ce-8cb9-4370-85a3-739a0bc13f39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.562748] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.566834] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3420b1d5-6807-449d-bbb3-7630aa82cde2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.583998] env[61545]: DEBUG nova.compute.provider_tree [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.702373] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1319.702888] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1319.702888] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1319.703060] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1319.703144] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1319.703288] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1319.703501] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1319.703667] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1319.703835] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1319.703999] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1319.704191] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1319.709362] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab2c30dd-c294-4bb9-89eb-4e2779f03d94 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.729086] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1319.729086] env[61545]: value = "task-4256977" [ 1319.729086] env[61545]: _type = "Task" [ 1319.729086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.740146] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256977, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.851692] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256975, 'name': ReconfigVM_Task, 'duration_secs': 1.733981} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.852091] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 7e2f8c49-4a0c-4152-9d01-34219bba83f3/7e2f8c49-4a0c-4152-9d01-34219bba83f3.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.852974] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d4db2a4-564d-4d16-8af4-1eb06b414b2d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.856297] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.856596] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.864927] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1319.864927] env[61545]: value = "task-4256978" [ 1319.864927] env[61545]: _type = "Task" [ 1319.864927] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.881744] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256978, 'name': Rename_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.059678] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.080986] env[61545]: DEBUG nova.network.neutron [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.088154] env[61545]: DEBUG nova.scheduler.client.report [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1320.242703] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256977, 'name': ReconfigVM_Task, 'duration_secs': 0.486666} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.243187] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1320.359922] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.360147] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.361103] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f4711-5ea3-48da-9024-d3584087f59e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.381746] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036258a6-99d0-49ef-b957-9c21b55256e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.387614] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256978, 'name': Rename_Task, 'duration_secs': 0.45578} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.388334] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1320.388602] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23f74cff-f38c-40ca-8561-e9c2758c596a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.412606] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfiguring VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1320.413486] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5c57ea7-8728-417a-9e4d-9ff61e010da0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.428173] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1320.428173] env[61545]: value = "task-4256979" [ 1320.428173] env[61545]: _type = "Task" [ 1320.428173] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.434173] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1320.434173] env[61545]: value = "task-4256980" [ 1320.434173] env[61545]: _type = "Task" [ 1320.434173] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.440592] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.446597] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.559177] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256974, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.800199} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.559613] env[61545]: INFO nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5/OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5.vmdk to [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk. [ 1320.559765] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Cleaning up location [datastore2] OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1320.560066] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_589ec577-5840-4ab4-82f5-d7c029178da5 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1320.560415] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f48e97ef-d14d-4058-ad58-f4141eca71f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.569648] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1320.569648] env[61545]: value = "task-4256981" [ 1320.569648] env[61545]: _type = "Task" [ 1320.569648] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.579552] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256981, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.584643] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.593578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.596188] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.696s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.618880] env[61545]: INFO nova.scheduler.client.report [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance 22437c4c-0e0b-4191-b079-3f6b7031656c [ 1320.751662] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1320.752009] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.752200] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1320.752875] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.752875] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Image pref 0:0:0 {{(pid=61545) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1320.752875] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1320.753098] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1320.753145] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1320.753319] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1320.753507] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1320.753710] env[61545]: DEBUG nova.virt.hardware [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1320.759909] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1320.760266] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2790067-5ef7-480f-aece-e546e28d6dab {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.781611] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1320.781611] env[61545]: value = "task-4256982" [ 1320.781611] env[61545]: _type = "Task" [ 1320.781611] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.790558] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.941731] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256979, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.947771] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.079967] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039096} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.080192] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.080344] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Releasing lock "[datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.080595] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk to [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1321.080873] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bd4791e-b288-4398-ac07-6b23b027f1f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.091723] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1321.091723] env[61545]: value = "task-4256983" [ 1321.091723] env[61545]: _type = "Task" [ 1321.091723] env[61545]: 
} to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.106888] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.126868] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6a12d53c-ccb2-442d-8467-b86c3a21416f tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "22437c4c-0e0b-4191-b079-3f6b7031656c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.583s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.291806] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256982, 'name': ReconfigVM_Task, 'duration_secs': 0.196466} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.292193] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1321.293034] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a667a7-77ff-46bd-a43d-5b2a106e5f4a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.315579] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-f670e2ee-2d66-439c-be73-79914f3d6fd5/volume-f670e2ee-2d66-439c-be73-79914f3d6fd5.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1321.315926] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f81506ac-b576-4daa-acb6-1d68f571352f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.334753] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1321.334753] env[61545]: value = "task-4256984" [ 1321.334753] env[61545]: _type = "Task" [ 1321.334753] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.344132] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.442148] env[61545]: DEBUG oslo_vmware.api [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256979, 'name': PowerOnVM_Task, 'duration_secs': 0.641587} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.442923] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.443241] env[61545]: INFO nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1321.443481] env[61545]: DEBUG nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1321.444427] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3b8f71-0451-49a9-8339-431992f055cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.450967] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.609515] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 as it has an incoming, in-progress migration ddb5e038-2bfa-4a2d-9185-4e47cd743140. Migration status is migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1321.609898] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance e7af11e5-7500-4fc7-8c68-651376dff297 as it has an incoming, in-progress migration 48f4f990-0e12-40ad-a925-28b1254e1dc7. 
Migration status is migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1321.611796] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating resource usage from migration ddb5e038-2bfa-4a2d-9185-4e47cd743140 [ 1321.612315] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating resource usage from migration 48f4f990-0e12-40ad-a925-28b1254e1dc7 [ 1321.615515] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9a59f45b-727f-45ea-ad33-64fa23aaffe7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 99c9b4ab-efcf-4e13-bd92-c634972fe082 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance c677a1fe-4c95-4142-8f1a-fcc8a21389c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 40bade64-b16b-4a33-a9ea-18f80a32c6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 7e2f8c49-4a0c-4152-9d01-34219bba83f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration ddb5e038-2bfa-4a2d-9185-4e47cd743140 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration 48f4f990-0e12-40ad-a925-28b1254e1dc7 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance e7af11e5-7500-4fc7-8c68-651376dff297 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1321.637903] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=250GB used_disk=9GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1321.807393] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b33cbc-5275-467e-a90e-c6c434265d5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.818343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a541117-d604-41dd-9fe5-b6bf1ac26d43 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.861786] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88090c8d-a1b7-497d-b906-f6dd50724efe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.871080] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256984, 'name': ReconfigVM_Task, 'duration_secs': 0.331983} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.873688] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-f670e2ee-2d66-439c-be73-79914f3d6fd5/volume-f670e2ee-2d66-439c-be73-79914f3d6fd5.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1321.874034] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1321.878731] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1601128b-d5a4-4321-91e3-6ef87e428525 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.896297] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.947245] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.968225] env[61545]: INFO nova.compute.manager [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Took 23.93 seconds to build instance. [ 1322.105825] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93367b32-499b-464c-8c03-6414bccde103 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.113076] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.130731] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1322.386456] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd85808-2288-4e06-b917-b1ed923621a2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.410935] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.415406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e21db03-07a1-4c17-a58d-3a61b60ba8f0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.436135] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1322.450862] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.471100] env[61545]: DEBUG oslo_concurrency.lockutils [None req-870067e3-70d0-493a-b157-7500b3a45e8b tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.439s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.499333] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "5f82f8f7-be58-4a75-9420-2c321e480c26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.499552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.607079] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.637233] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1322.637606] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f22299f5-c094-48ee-b971-0aee24de0b25 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.647248] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1322.647248] env[61545]: value = "task-4256985" [ 1322.647248] env[61545]: _type = "Task" [ 1322.647248] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.658268] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.920207] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1322.920426] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.324s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.961963] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.002927] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1323.067049] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.067431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.067662] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.067905] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.068144] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 
tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.070737] env[61545]: INFO nova.compute.manager [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Terminating instance [ 1323.107338] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.160909] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256985, 'name': PowerOffVM_Task, 'duration_secs': 0.392325} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.161180] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.161441] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1323.452278] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.527470] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.527770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.529330] env[61545]: INFO nova.compute.claims [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1323.575375] env[61545]: DEBUG nova.compute.manager [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.575712] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.576611] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272cf21a-8d32-4efd-aa09-d726ce572220 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.586749] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.587060] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c6ec435-bb21-452a-92fa-2554af050a4d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.595663] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1323.595663] env[61545]: value = "task-4256986" [ 1323.595663] env[61545]: _type = "Task" [ 1323.595663] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.610881] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.615255] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.668172] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1323.668693] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.668693] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1323.668875] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.668949] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1323.669082] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1323.669308] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1323.669475] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1323.669768] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1323.669983] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1323.670184] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1323.675822] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0941829-7089-4250-a6e8-958af98930e5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.697936] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1323.697936] env[61545]: value = "task-4256987" [ 1323.697936] env[61545]: _type = "Task" [ 1323.697936] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.712297] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256987, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.956628] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.113808] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.118586] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256986, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.124287] env[61545]: DEBUG nova.network.neutron [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Port 9586e46e-23b8-40a2-9703-712bf31c9e96 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1324.211639] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256987, 'name': ReconfigVM_Task, 'duration_secs': 0.413947} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.212926] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1324.454079] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.610012] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256986, 'name': PowerOffVM_Task, 'duration_secs': 0.546096} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.613059] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.613250] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.614046] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b86647b-6cbe-42cc-a564-7f971e66c1a6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.618442] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256983, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.127094} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.619016] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c/86eba4c3-496e-4cb2-b4bf-ef3b023fdd8c.vmdk to [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1324.619915] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc13b10f-b7f0-476c-b849-d5ce457dfa73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.647473] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1324.650737] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfe944ab-fdac-4ce7-832d-bc283fde1963 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.671314] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1324.671314] env[61545]: value = "task-4256989" [ 1324.671314] env[61545]: _type = "Task" [ 1324.671314] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.682399] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256989, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.684146] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.684370] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.684581] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Deleting the datastore file [datastore2] 7e2f8c49-4a0c-4152-9d01-34219bba83f3 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.684860] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99cac8a6-373c-40a9-9b66-74019047adce {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.691891] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for the task: (returnval){ [ 1324.691891] env[61545]: value = "task-4256990" [ 1324.691891] env[61545]: _type = "Task" [ 1324.691891] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.705062] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.719710] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1324.721067] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1324.721067] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1324.721067] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1324.721067] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1324.721067] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1324.721362] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1324.721404] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1324.721609] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1324.721794] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1324.722057] env[61545]: DEBUG nova.virt.hardware [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1324.728335] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1324.731887] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0115c1ad-22a2-436d-8a5d-396b3cda1888 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.751313] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1324.751313] env[61545]: value = "task-4256991" [ 1324.751313] env[61545]: _type = "Task" [ 1324.751313] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.762825] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256991, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.771044] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98791bfb-6b17-413c-9a5a-e15c2fa2cc1c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.779323] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283d1296-eeec-4476-83e9-006199113292 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.810332] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8417ae-0d9b-48b9-89c4-8e37cab07ad5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.818969] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81644abd-f707-49e8-8da2-7e5f96e55a34 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.836621] env[61545]: DEBUG nova.compute.provider_tree [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1324.920699] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.921058] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.955275] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.165815] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.166106] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.166224] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.181897] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256989, 'name': ReconfigVM_Task, 'duration_secs': 0.386081} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.182794] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc/40bade64-b16b-4a33-a9ea-18f80a32c6bc.vmdk or device None with type streamOptimized {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.183825] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25a9a00e-0b47-48fc-b523-a32be74e3ec5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.191293] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1325.191293] env[61545]: value = "task-4256992" [ 1325.191293] env[61545]: _type = "Task" [ 1325.191293] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.203373] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256992, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.206596] env[61545]: DEBUG oslo_vmware.api [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Task: {'id': task-4256990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212073} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.206791] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.206975] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.207169] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.207345] env[61545]: INFO nova.compute.manager [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1325.207584] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1325.207770] env[61545]: DEBUG nova.compute.manager [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1325.207862] env[61545]: DEBUG nova.network.neutron [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1325.261975] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256991, 'name': ReconfigVM_Task, 'duration_secs': 0.234968} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.262281] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1325.263158] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5591bacd-a1ec-493b-b955-0c1926e360e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.287813] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1325.288200] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18593c89-c4ab-4d80-82e4-cbffdc649242 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.308079] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1325.308079] env[61545]: value = "task-4256993" [ 1325.308079] env[61545]: _type = "Task" [ 1325.308079] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.317680] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256993, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.341071] env[61545]: DEBUG nova.scheduler.client.report [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1325.429617] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.430043] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.430367] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.430577] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1325.456206] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.529729] env[61545]: DEBUG nova.compute.manager [req-16f1d9bf-ee06-4499-b1db-2349104e9834 req-6ab1c85b-d7f8-496e-ab1e-4908914717ad service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Received event network-vif-deleted-98656068-e5a8-462f-bf47-905ed2b92aae {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1325.529950] env[61545]: INFO nova.compute.manager [req-16f1d9bf-ee06-4499-b1db-2349104e9834 req-6ab1c85b-d7f8-496e-ab1e-4908914717ad service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Neutron deleted interface 98656068-e5a8-462f-bf47-905ed2b92aae; detaching it from the instance and deleting it from the info cache [ 1325.530177] env[61545]: DEBUG nova.network.neutron [req-16f1d9bf-ee06-4499-b1db-2349104e9834 req-6ab1c85b-d7f8-496e-ab1e-4908914717ad service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.702166] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256992, 'name': Rename_Task, 'duration_secs': 0.185036} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.702574] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.702704] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb9ac977-ce87-412d-a774-514ecd278713 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.710672] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1325.710672] env[61545]: value = "task-4256994" [ 1325.710672] env[61545]: _type = "Task" [ 1325.710672] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.719249] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256994, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.819326] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4256993, 'name': ReconfigVM_Task, 'duration_secs': 0.278104} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.819635] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Reconfigured VM instance instance-00000077 to attach disk [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297/e7af11e5-7500-4fc7-8c68-651376dff297.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.819917] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1325.847764] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.848354] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1325.956584] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.009562] env[61545]: DEBUG nova.network.neutron [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.032397] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-662c05e9-b1ab-4b07-98c1-aeb85a9f7bfb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.043199] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa1affa-a896-477b-b637-98a735f92a99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.076279] env[61545]: DEBUG nova.compute.manager [req-16f1d9bf-ee06-4499-b1db-2349104e9834 req-6ab1c85b-d7f8-496e-ab1e-4908914717ad service nova] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Detach interface failed, port_id=98656068-e5a8-462f-bf47-905ed2b92aae, reason: Instance 7e2f8c49-4a0c-4152-9d01-34219bba83f3 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1326.215946] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.216210] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1326.216395] env[61545]: DEBUG nova.network.neutron [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.224380] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256994, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.327416] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109c3ca9-de73-48be-9510-8996307158c2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.348226] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ce33af-2f6f-422f-819d-8ca28a0ddf13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.353267] env[61545]: DEBUG nova.compute.utils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1326.367454] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1326.367642] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1326.370258] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1326.373991] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1326.422469] env[61545]: DEBUG nova.policy [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1326.457234] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.512465] env[61545]: INFO nova.compute.manager [-] [instance: 7e2f8c49-4a0c-4152-9d01-34219bba83f3] Took 1.30 seconds to deallocate network for instance. [ 1326.724648] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256994, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.745061] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Successfully created port: fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.957878] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.973245] env[61545]: DEBUG nova.network.neutron [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Port 10ec8bc8-486b-4eea-8d00-5b81fe9f1380 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1327.018584] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.018882] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.019083] env[61545]: DEBUG nova.objects.instance [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lazy-loading 'resources' on Instance uuid 7e2f8c49-4a0c-4152-9d01-34219bba83f3 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1327.107488] env[61545]: DEBUG nova.network.neutron [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", "address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.224777] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256994, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.385893] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1327.413793] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1327.414081] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.414256] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1327.414492] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.414694] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1327.414821] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1327.414996] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1327.415184] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1327.415353] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1327.415515] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1327.415686] env[61545]: DEBUG nova.virt.hardware [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1327.416644] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d12579-aefa-4f67-9a4e-3f3cebfb993f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.427713] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd08f8f-20ae-4b0a-b528-1fe049875081 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.460201] env[61545]: DEBUG oslo_vmware.api [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256980, 'name': ReconfigVM_Task, 'duration_secs': 6.782937} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.460572] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.460881] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Reconfigured VM to detach interface {{(pid=61545) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1327.612031] env[61545]: DEBUG oslo_concurrency.lockutils [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.712510] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37628c08-63d2-4e5c-b454-8a7f890ff0b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.724795] env[61545]: DEBUG oslo_vmware.api [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256994, 'name': PowerOnVM_Task, 'duration_secs': 1.906034} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.726522] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.729756] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38beb61d-ffa9-4b23-8bef-ab5fbcf21911 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.768504] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1214b9ab-34e5-4683-be1f-e5d3939609a5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.777322] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f05ca7-bd2b-4a77-870d-403e619b96ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.795731] env[61545]: DEBUG nova.compute.provider_tree [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.841115] env[61545]: DEBUG nova.compute.manager [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1327.842115] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02f6803-9178-42ed-aa03-b772731e8d9a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.002959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.002959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.003242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.125427] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74afb7f0-a4a7-4882-9122-f348db80ce42 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.133079] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3028846f-47fc-4f51-a2e7-2066a78c223c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.299627] env[61545]: DEBUG nova.scheduler.client.report [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1328.361027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-62628359-d9e3-4f4c-83a9-bbb5cac8fa4d tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 32.042s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.480925] env[61545]: DEBUG nova.compute.manager [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Received event network-vif-plugged-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1328.480925] env[61545]: DEBUG oslo_concurrency.lockutils [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] Acquiring lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.480925] env[61545]: DEBUG oslo_concurrency.lockutils [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.480925] env[61545]: DEBUG oslo_concurrency.lockutils [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.480925] env[61545]: DEBUG nova.compute.manager [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] No waiting events found dispatching network-vif-plugged-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1328.482134] env[61545]: WARNING nova.compute.manager [req-44be5697-43f0-4e79-b9b4-1a99514a12ba req-b293091a-e8e2-439b-a433-c9de8439b8b2 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Received unexpected event network-vif-plugged-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d for instance with vm_state building and task_state spawning. [ 1328.618784] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Successfully updated port: fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.805441] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.820334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.820525] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquired lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.820744] env[61545]: DEBUG nova.network.neutron [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.832008] env[61545]: INFO nova.scheduler.client.report [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Deleted allocations for instance 7e2f8c49-4a0c-4152-9d01-34219bba83f3 [ 1329.043498] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.043697] env[61545]: DEBUG oslo_concurrency.lockutils [None 
req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.043875] env[61545]: DEBUG nova.network.neutron [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1329.123696] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.123858] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.124037] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1329.240649] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420cb9ca-d02c-48fe-a5d7-5758f1618d90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.260679] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.261043] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.261313] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.261513] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.261714] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.264447] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988c2d8a-4af6-4ea6-bb06-3eac49213209 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.267632] env[61545]: INFO nova.compute.manager [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Terminating instance [ 1329.274943] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1329.339495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-30e08ea4-42df-46eb-bb34-fe8013f8f135 tempest-ServersNegativeTestMultiTenantJSON-147642954 tempest-ServersNegativeTestMultiTenantJSON-147642954-project-member] Lock "7e2f8c49-4a0c-4152-9d01-34219bba83f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.272s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.386251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.386536] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.386784] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.386988] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.387179] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.390047] env[61545]: INFO nova.compute.manager [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Terminating instance [ 1329.716032] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1329.744475] env[61545]: INFO nova.network.neutron [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Port 67104b0f-71e5-40be-965b-8376a6c120f8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1329.744836] env[61545]: DEBUG nova.network.neutron [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [{"id": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "address": "fa:16:3e:2d:c7:64", "network": {"id": "980f8e73-b8ce-492a-90f5-f43e01dc44cd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-308755015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "845539fcaa744e59b6eb695b8a257de4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap735d1ad5-27", "ovs_interfaceid": "735d1ad5-27dd-48fe-9d11-abc15c2f647b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.774829] env[61545]: DEBUG nova.compute.manager [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1329.775141] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1329.776031] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99e8b73-94f3-4336-8979-66ea1ff40a99 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.781036] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1329.781888] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d774cb91-2b1a-49bb-b286-2dc3e4da5a92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.790905] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1329.793681] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0df6ac0e-cfb0-45bd-a184-e4e4b86c67c4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.800112] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1329.800112] env[61545]: value = "task-4256995" [ 1329.800112] env[61545]: _type = "Task" [ 1329.800112] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.801397] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1329.801397] env[61545]: value = "task-4256996" [ 1329.801397] env[61545]: _type = "Task" [ 1329.801397] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.816423] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.820574] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256996, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.894920] env[61545]: DEBUG nova.compute.manager [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1329.895260] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1329.896889] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784c87ed-69eb-425c-afc6-2541b7b3a950 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.907355] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1329.907753] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a29eba85-016c-4356-8e13-1bea7e9ffdc5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.918588] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1329.918588] env[61545]: value = "task-4256997" [ 1329.918588] env[61545]: _type = "Task" [ 1329.918588] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.928460] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256997, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.105094] env[61545]: DEBUG nova.network.neutron [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Updating instance_info_cache with network_info: [{"id": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "address": "fa:16:3e:be:69:7c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa7b2d4a-22", "ovs_interfaceid": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.140376] env[61545]: DEBUG nova.network.neutron [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.252532] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Releasing lock "refresh_cache-c677a1fe-4c95-4142-8f1a-fcc8a21389c6" {{(pid=61545) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.314041] env[61545]: DEBUG oslo_vmware.api [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4256995, 'name': PowerOnVM_Task, 'duration_secs': 0.411907} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.314518] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1330.315593] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-92d0bfbf-923b-4d6c-afe8-4706d76e4be8 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance '4e2eb270-abf0-4734-a49f-ac0b7ee141c8' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1330.321875] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256996, 'name': PowerOffVM_Task, 'duration_secs': 0.204606} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.322307] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1330.322407] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1330.323648] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-782d73f3-047c-4af8-a2d3-d5128a80640c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.386705] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1330.387049] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1330.387308] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a 
tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleting the datastore file [datastore2] 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.388849] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46c28088-bf6d-4514-a1a7-f2bc0b372807 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.396551] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for the task: (returnval){ [ 1330.396551] env[61545]: value = "task-4256999" [ 1330.396551] env[61545]: _type = "Task" [ 1330.396551] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.407405] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.429488] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4256997, 'name': PowerOffVM_Task, 'duration_secs': 0.199679} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.429897] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1330.430133] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1330.430499] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07979c54-67d0-4477-a76f-55756b8aa432 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.502030] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1330.502208] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1330.502309] 
env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleting the datastore file [datastore2] c677a1fe-4c95-4142-8f1a-fcc8a21389c6 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.502821] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01837768-2209-4c31-b0e8-a036bcc20738 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.510257] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1330.510257] env[61545]: value = "task-4257001" [ 1330.510257] env[61545]: _type = "Task" [ 1330.510257] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.520981] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.523470] env[61545]: DEBUG nova.compute.manager [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Received event network-changed-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1330.523709] env[61545]: DEBUG nova.compute.manager [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Refreshing instance network info cache due to event network-changed-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1330.523878] env[61545]: DEBUG oslo_concurrency.lockutils [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] Acquiring lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.612104] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.614282] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Instance network_info: |[{"id": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "address": "fa:16:3e:be:69:7c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa7b2d4a-22", "ovs_interfaceid": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1330.614282] env[61545]: DEBUG oslo_concurrency.lockutils [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] Acquired lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1330.614282] env[61545]: DEBUG nova.network.neutron [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Refreshing network info cache for port fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1330.614282] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:69:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d', 'vif_model': 'vmxnet3'}] {{(pid=61545) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1330.625135] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1330.625135] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1330.625349] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-820970bb-120c-4dac-b7d2-906ad1f00895 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.643362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.649450] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1330.649450] env[61545]: value = "task-4257002" [ 1330.649450] env[61545]: _type = "Task" [ 1330.649450] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.659345] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257002, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.759024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-efb55f40-1258-4353-b16e-591bf3a70256 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "interface-c677a1fe-4c95-4142-8f1a-fcc8a21389c6-67104b0f-71e5-40be-965b-8376a6c120f8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.901s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.907731] env[61545]: DEBUG oslo_vmware.api [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Task: {'id': task-4256999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181517} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.907731] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1330.907731] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1330.908314] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1330.908314] env[61545]: INFO nova.compute.manager [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1330.908395] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1330.908520] env[61545]: DEBUG nova.compute.manager [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1330.908615] env[61545]: DEBUG nova.network.neutron [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1331.020906] env[61545]: DEBUG oslo_vmware.api [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186812} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.021225] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1331.021427] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1331.021623] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1331.021934] env[61545]: INFO nova.compute.manager [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1331.022307] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1331.022592] env[61545]: DEBUG nova.compute.manager [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1331.022763] env[61545]: DEBUG nova.network.neutron [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1331.163777] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257002, 'name': CreateVM_Task, 'duration_secs': 0.382926} completed successfully. 
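The "_deallocate_network_with_retries" wait above comes from wrapping the Neutron deallocation in an oslo.service looping call so that transient failures are retried. A minimal sketch of a bounded retry built on FixedIntervalLoopingCall; the attempt count and interval are illustrative, not Nova's actual policy:

from oslo_service import loopingcall

def deallocate_with_retries(deallocate_fn, max_attempts=3, interval=2):
    attempts = 0

    def _try_once():
        nonlocal attempts
        attempts += 1
        try:
            deallocate_fn()
        except Exception:
            if attempts >= max_attempts:
                raise          # give up; the error propagates out of wait()
            return             # swallow it and let the loop fire again
        raise loopingcall.LoopingCallDone()   # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=interval).wait()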
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.163995] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1331.164732] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.164918] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.165281] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1331.165556] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3e24aa-377b-4559-b94e-a9b8ea8eb20c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.171253] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1331.171253] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c3e01-4d46-62e6-4d27-1def69bc274d" [ 1331.171253] env[61545]: _type = "Task" [ 1331.171253] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.175808] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27411cc1-8521-49f3-91cd-a4082c90bd60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.183965] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c3e01-4d46-62e6-4d27-1def69bc274d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.198367] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c63547-6dae-48e6-87a8-110efec5aa8e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.206412] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1331.683286] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]525c3e01-4d46-62e6-4d27-1def69bc274d, 'name': SearchDatastore_Task, 'duration_secs': 0.0356} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.683577] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1331.683809] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.684105] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.684289] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.684511] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.684826] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-958332b5-e096-4c5b-abc3-5eb8d268de11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.695860] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.696096] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.696881] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de2f40e6-e926-4d24-9987-5c83b83d9348 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.703458] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1331.703458] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278fd0b-5c31-4e88-30ba-3063e299c237" [ 1331.703458] env[61545]: _type = "Task" [ 1331.703458] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.713705] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.714163] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278fd0b-5c31-4e88-30ba-3063e299c237, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.714385] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d42487a-f76d-4ac6-bd26-51018c7a3ae8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.722322] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1331.722322] env[61545]: value = "task-4257003" [ 1331.722322] env[61545]: _type = "Task" [ 1331.722322] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.733144] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257003, 'name': PowerOnVM_Task} progress is 0%. 
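The Acquiring/Acquired/Releasing lines around the devstack-image-cache_base path in the preceding entries show the image-cache work being serialized with oslo.concurrency locks keyed on the datastore path. A minimal sketch of that locking shape, with a hypothetical fetch helper standing in for the real cache-population code:

import os
from oslo_concurrency import lockutils

def ensure_cached_image(cache_dir, image_id, fetch_image_to_cache):
    # Lock name mirrors the datastore path seen in the log entries above.
    lock_name = '[datastore2] devstack-image-cache_base/%s' % image_id
    # Only one worker per process touches this cache entry at a time.
    with lockutils.lock(lock_name):
        vmdk = os.path.join(cache_dir, image_id, '%s.vmdk' % image_id)
        if not os.path.exists(vmdk):
            fetch_image_to_cache(image_id, vmdk)   # hypothetical helper
        return vmdk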
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.126398] env[61545]: DEBUG nova.network.neutron [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Updated VIF entry in instance network info cache for port fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1332.126767] env[61545]: DEBUG nova.network.neutron [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Updating instance_info_cache with network_info: [{"id": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "address": "fa:16:3e:be:69:7c", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa7b2d4a-22", "ovs_interfaceid": "fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.216205] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5278fd0b-5c31-4e88-30ba-3063e299c237, 'name': SearchDatastore_Task, 'duration_secs': 0.013017} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.217119] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7180155-45f4-4803-be44-0fa57433ec26 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.224109] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1332.224109] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f5e8f9-530f-9ab9-b3d4-99e2f0aa0852" [ 1332.224109] env[61545]: _type = "Task" [ 1332.224109] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.240510] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257003, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.247535] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f5e8f9-530f-9ab9-b3d4-99e2f0aa0852, 'name': SearchDatastore_Task, 'duration_secs': 0.011139} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.247969] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.248608] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5f82f8f7-be58-4a75-9420-2c321e480c26/5f82f8f7-be58-4a75-9420-2c321e480c26.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1332.248952] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-683d69ae-24d0-4852-86df-d6e30e5e64a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.259868] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1332.259868] env[61545]: value = "task-4257004" [ 1332.259868] env[61545]: _type = "Task" [ 1332.259868] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.283319] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.569220] env[61545]: DEBUG nova.compute.manager [req-d89d3ee3-592c-4afc-8773-d53f80904751 req-4a4b07ff-5319-4c8e-9d05-8272c83aca0b service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Received event network-vif-deleted-ff62e994-2e58-433b-884f-5b4fa7639d6b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1332.569842] env[61545]: INFO nova.compute.manager [req-d89d3ee3-592c-4afc-8773-d53f80904751 req-4a4b07ff-5319-4c8e-9d05-8272c83aca0b service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Neutron deleted interface ff62e994-2e58-433b-884f-5b4fa7639d6b; detaching it from the instance and deleting it from the info cache [ 1332.569842] env[61545]: DEBUG nova.network.neutron [req-d89d3ee3-592c-4afc-8773-d53f80904751 req-4a4b07ff-5319-4c8e-9d05-8272c83aca0b service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.629316] env[61545]: DEBUG oslo_concurrency.lockutils [req-a731f27d-c879-4360-b06e-298a35029f34 req-ce6e04e2-ce36-40c7-9aba-2eddd3dbd04d service nova] Releasing lock "refresh_cache-5f82f8f7-be58-4a75-9420-2c321e480c26" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.741832] env[61545]: DEBUG oslo_vmware.api [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257003, 'name': PowerOnVM_Task, 'duration_secs': 0.641901} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.742154] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.742357] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-adabe92f-574e-4ae9-b9d0-95f76ae7e58e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance 'e7af11e5-7500-4fc7-8c68-651376dff297' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1332.774168] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257004, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.785056] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.785252] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.785417] env[61545]: DEBUG nova.compute.manager [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Going to confirm migration 8 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1332.804984] env[61545]: DEBUG nova.network.neutron [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.072749] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a688b2a-2739-4996-8e0c-245d082e6829 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.083816] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d60133c-af57-4d23-ba39-946c879a0c0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.116296] env[61545]: DEBUG nova.compute.manager [req-d89d3ee3-592c-4afc-8773-d53f80904751 req-4a4b07ff-5319-4c8e-9d05-8272c83aca0b service nova] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Detach interface failed, port_id=ff62e994-2e58-433b-884f-5b4fa7639d6b, reason: Instance 40bade64-b16b-4a33-a9ea-18f80a32c6bc could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1333.271136] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536791} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.272119] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 5f82f8f7-be58-4a75-9420-2c321e480c26/5f82f8f7-be58-4a75-9420-2c321e480c26.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1333.272298] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1333.272557] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17c73954-1920-4e13-b185-133fb6ee8058 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.276549] env[61545]: DEBUG nova.network.neutron [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.284125] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1333.284125] env[61545]: value = "task-4257005" [ 1333.284125] env[61545]: _type = "Task" [ 1333.284125] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.297975] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257005, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.314389] env[61545]: INFO nova.compute.manager [-] [instance: 40bade64-b16b-4a33-a9ea-18f80a32c6bc] Took 2.41 seconds to deallocate network for instance. 
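The "Extending root virtual disk to 1048576" entry above is the flavor root size converted to kilobytes (1 GiB = 1048576 KB) and handed to ExtendVirtualDisk_Task. A sketch of that step, assuming an existing session and datacenter reference; parameter names follow the vSphere API:

from oslo_utils import units

def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
    requested_size_kb = root_gb * units.Mi   # GiB -> KB, e.g. 1 -> 1048576
    disk_mgr = session.vim.service_content.virtualDiskManager
    task_ref = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=requested_size_kb,
                                  eagerZero=False)
    # Shows up in the log as ExtendVirtualDisk_Task progress 0% .. 100%.
    session.wait_for_task(task_ref)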
[ 1333.333021] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.333280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquired lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1333.333521] env[61545]: DEBUG nova.network.neutron [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1333.334136] env[61545]: DEBUG nova.objects.instance [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'info_cache' on Instance uuid 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.779474] env[61545]: INFO nova.compute.manager [-] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Took 2.76 seconds to deallocate network for instance. [ 1333.798306] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185326} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.799848] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.799848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b2bd5e-1a5f-4fab-9bcc-d86337de2242 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.823491] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.823749] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.824027] env[61545]: DEBUG nova.objects.instance [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lazy-loading 'resources' on Instance uuid 40bade64-b16b-4a33-a9ea-18f80a32c6bc {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.834973] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 5f82f8f7-be58-4a75-9420-2c321e480c26/5f82f8f7-be58-4a75-9420-2c321e480c26.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.835998] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1286cf4a-24c6-47d7-80f5-ac837cac2d4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.863059] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1333.863059] env[61545]: value = "task-4257006" [ 1333.863059] env[61545]: _type = "Task" [ 1333.863059] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.875394] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257006, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.292282] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.377333] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257006, 'name': ReconfigVM_Task, 'duration_secs': 0.293915} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.377634] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 5f82f8f7-be58-4a75-9420-2c321e480c26/5f82f8f7-be58-4a75-9420-2c321e480c26.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.378309] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21c269fc-73ba-408c-82f2-af5b1050dbf4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.388884] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1334.388884] env[61545]: value = "task-4257007" [ 1334.388884] env[61545]: _type = "Task" [ 1334.388884] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.398403] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257007, 'name': Rename_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.511379] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d2b444-ff8c-4e20-86cb-668bab565405 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.523316] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27ed2cf-26d1-4911-b834-aa631426348b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.564238] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260b47e4-4242-453e-a8c1-2127e46cf5d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.567034] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.567254] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.567445] env[61545]: DEBUG nova.compute.manager [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Going to confirm migration 9 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1334.575474] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2dfa3b-d2cc-47a6-93ec-4cbccbc49625 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.592612] env[61545]: DEBUG nova.compute.provider_tree [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1334.605071] env[61545]: DEBUG nova.compute.manager [req-c147152d-8ffd-42ca-9cdf-de183226f7e8 req-18d53683-6976-4f42-a9f7-e7dc45dd14e3 service nova] [instance: c677a1fe-4c95-4142-8f1a-fcc8a21389c6] Received event network-vif-deleted-735d1ad5-27dd-48fe-9d11-abc15c2f647b {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1334.717685] env[61545]: DEBUG nova.network.neutron [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [{"id": "9586e46e-23b8-40a2-9703-712bf31c9e96", 
"address": "fa:16:3e:f7:f4:1b", "network": {"id": "67354c7a-a299-47f4-a003-2491f3609eed", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-91812628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da39b1ee6df640b89a9dab58e3380397", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9586e46e-23", "ovs_interfaceid": "9586e46e-23b8-40a2-9703-712bf31c9e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.906823] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257007, 'name': Rename_Task, 'duration_secs': 0.160601} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.907191] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1334.907554] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b1befe1-b67a-472a-a00e-0fecfd319cc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.914680] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1334.914680] env[61545]: value = "task-4257008" [ 1334.914680] env[61545]: _type = "Task" [ 1334.914680] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.923593] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257008, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.096448] env[61545]: DEBUG nova.scheduler.client.report [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1335.117678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.117867] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.118059] env[61545]: DEBUG nova.network.neutron [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1335.118248] env[61545]: DEBUG nova.objects.instance [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'info_cache' on Instance uuid e7af11e5-7500-4fc7-8c68-651376dff297 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1335.221217] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Releasing lock "refresh_cache-4e2eb270-abf0-4734-a49f-ac0b7ee141c8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.221495] env[61545]: DEBUG nova.objects.instance [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'migration_context' on Instance uuid 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1335.425518] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257008, 'name': PowerOnVM_Task} progress is 89%. 
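The inventory report above is easier to read once the placement capacity rule is applied: usable capacity per resource class is (total - reserved) * allocation_ratio. Checking the logged values for provider 7015027d-c4e1-4938-ac31-6e4672774d7e:

# Effective capacity implied by the inventory in the log entry above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 450,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 450.0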
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.602742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.605620] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.313s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.606328] env[61545]: DEBUG nova.objects.instance [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'resources' on Instance uuid c677a1fe-4c95-4142-8f1a-fcc8a21389c6 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1335.628262] env[61545]: INFO nova.scheduler.client.report [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Deleted allocations for instance 40bade64-b16b-4a33-a9ea-18f80a32c6bc [ 1335.726067] env[61545]: DEBUG nova.objects.base [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Object Instance<4e2eb270-abf0-4734-a49f-ac0b7ee141c8> lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1335.726848] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069ef2a5-6d51-47a7-b552-a9bb93bae8a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.748432] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54c8e5a2-2323-4886-9b7d-1d84da4524d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.754815] env[61545]: DEBUG oslo_vmware.api [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1335.754815] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52265b0a-e1c4-ffb8-66c3-f1445d2ca655" [ 1335.754815] env[61545]: _type = "Task" [ 1335.754815] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.763328] env[61545]: DEBUG oslo_vmware.api [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52265b0a-e1c4-ffb8-66c3-f1445d2ca655, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.925251] env[61545]: DEBUG oslo_vmware.api [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257008, 'name': PowerOnVM_Task, 'duration_secs': 0.633593} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.925532] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1335.925737] env[61545]: INFO nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Took 8.54 seconds to spawn the instance on the hypervisor. [ 1335.925921] env[61545]: DEBUG nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1335.926754] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33c25a6-2ec5-49a5-bbb9-b8c21287d416 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.136696] env[61545]: DEBUG oslo_concurrency.lockutils [None req-87169eb0-dbab-4982-a21c-27ca05d08e8a tempest-ServerActionsTestOtherB-1077210391 tempest-ServerActionsTestOtherB-1077210391-project-member] Lock "40bade64-b16b-4a33-a9ea-18f80a32c6bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.876s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.262607] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b346688e-b161-4eb0-8e63-262854f6bc69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.271220] env[61545]: DEBUG oslo_vmware.api [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52265b0a-e1c4-ffb8-66c3-f1445d2ca655, 'name': SearchDatastore_Task, 'duration_secs': 0.011051} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.272092] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.277088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c39f4c-81f5-4449-96be-7a4d15a99951 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.313568] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78863242-569a-4a2f-a559-aea0a68e8157 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.322493] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6b1dc8-3726-43b7-b7cb-830819f32fad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.337313] env[61545]: DEBUG nova.compute.provider_tree [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.409611] env[61545]: DEBUG nova.network.neutron [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [{"id": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "address": "fa:16:3e:d6:2c:94", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10ec8bc8-48", "ovs_interfaceid": "10ec8bc8-486b-4eea-8d00-5b81fe9f1380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.445660] env[61545]: INFO nova.compute.manager [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Took 
12.94 seconds to build instance. [ 1336.841043] env[61545]: DEBUG nova.scheduler.client.report [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1336.913136] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-e7af11e5-7500-4fc7-8c68-651376dff297" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.913377] env[61545]: DEBUG nova.objects.instance [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'migration_context' on Instance uuid e7af11e5-7500-4fc7-8c68-651376dff297 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1336.947367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-5cac0357-4d81-448f-9d09-ffcbe0f0834e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.448s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.345881] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.348374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.076s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.374713] env[61545]: INFO nova.scheduler.client.report [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted allocations for instance c677a1fe-4c95-4142-8f1a-fcc8a21389c6 [ 1337.416377] env[61545]: DEBUG nova.objects.base [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1337.417362] env[61545]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad21012b-8f1b-4722-a610-88a38c334347 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.437941] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6a500c9-a3d4-4bc6-a5c6-76e7d234fc56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.444865] env[61545]: DEBUG oslo_vmware.api [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1337.444865] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5246563d-bd5d-0a91-c4db-eed44e28d120" [ 1337.444865] env[61545]: _type = "Task" [ 1337.444865] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.455622] env[61545]: DEBUG oslo_vmware.api [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5246563d-bd5d-0a91-c4db-eed44e28d120, 'name': SearchDatastore_Task, 'duration_secs': 0.007784} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.455940] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.569673] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "5f82f8f7-be58-4a75-9420-2c321e480c26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.570013] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.570264] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.570486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.570682] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.573486] env[61545]: INFO nova.compute.manager [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Terminating instance [ 1337.884874] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3f570a2b-193e-47fc-a9ce-3bfc647c6399 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "c677a1fe-4c95-4142-8f1a-fcc8a21389c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.498s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.003225] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18560f8f-f537-4337-99fb-7bb8e38eb844 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.011763] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e267a4d0-4614-44b0-a5d4-712fcf5dedbd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.046210] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bd81f5-a34b-4edd-8201-fe1994efafa6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.055628] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7197b23d-3923-4dd6-8697-59f7623e1d95 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.071859] env[61545]: DEBUG nova.compute.provider_tree [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.080649] env[61545]: DEBUG nova.compute.manager [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1338.080879] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1338.082088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e39360-c2dc-4342-965f-983b661266ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.091049] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.091309] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20056873-7ee3-48f1-84d5-6d4a3064e73d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.098916] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1338.098916] env[61545]: value = "task-4257010" [ 1338.098916] env[61545]: _type = "Task" [ 1338.098916] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.108092] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257010, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.576070] env[61545]: DEBUG nova.scheduler.client.report [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1338.609612] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257010, 'name': PowerOffVM_Task, 'duration_secs': 0.22215} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.609947] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.611159] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1338.611159] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-137f31e9-361f-474f-8704-49bdff4f5ea4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.713955] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1338.714718] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1338.714970] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] 5f82f8f7-be58-4a75-9420-2c321e480c26 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1338.715302] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-786ff0f7-7efa-42f0-807f-0a4b2e92a493 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.724679] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1338.724679] env[61545]: value = "task-4257012" [ 1338.724679] env[61545]: _type = "Task" [ 1338.724679] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.734400] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257012, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.005020] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.005334] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.005552] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.005738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.005913] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.008315] env[61545]: INFO nova.compute.manager [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Terminating instance [ 1339.236666] env[61545]: DEBUG oslo_vmware.api [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165329} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.236973] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1339.237223] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1339.237415] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1339.237599] env[61545]: INFO nova.compute.manager [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1339.237852] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1339.238071] env[61545]: DEBUG nova.compute.manager [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1339.238164] env[61545]: DEBUG nova.network.neutron [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1339.512693] env[61545]: DEBUG nova.compute.manager [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1339.512897] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1339.514077] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a5bc43-07ec-4904-bd79-11ccfa5dba79 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.525219] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1339.525538] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5438c9a8-d0b2-4781-9758-7434ae388b06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.536308] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1339.536308] env[61545]: value = "task-4257013" [ 1339.536308] env[61545]: _type = "Task" [ 1339.536308] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.553567] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257013, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.579971] env[61545]: DEBUG nova.compute.manager [req-28aa0455-594c-4160-844b-aadf37c32816 req-46748b29-19e3-4a43-b7d4-e047ce0b5179 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Received event network-vif-deleted-fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1339.581178] env[61545]: INFO nova.compute.manager [req-28aa0455-594c-4160-844b-aadf37c32816 req-46748b29-19e3-4a43-b7d4-e047ce0b5179 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Neutron deleted interface fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d; detaching it from the instance and deleting it from the info cache [ 1339.581414] env[61545]: DEBUG nova.network.neutron [req-28aa0455-594c-4160-844b-aadf37c32816 req-46748b29-19e3-4a43-b7d4-e047ce0b5179 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.587858] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.239s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.590793] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.135s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.047701] env[61545]: DEBUG nova.network.neutron [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.049254] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257013, 'name': PowerOffVM_Task, 'duration_secs': 0.309078} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.049805] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1340.050040] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1340.050329] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a2b6f3d-b94b-4660-9cb3-c7eb244cb1b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.084805] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87f93a50-2061-402f-bbd7-9b4e7a975ae2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.095677] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ef2792-86a4-4bba-91dd-0e8437f42640 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.120135] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1340.120135] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1340.120135] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleting the datastore file [datastore2] e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1340.120135] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8071ba62-e4ae-4624-b007-e3b51e096326 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.126982] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for the task: (returnval){ [ 1340.126982] env[61545]: value = "task-4257015" [ 1340.126982] env[61545]: _type = "Task" [ 1340.126982] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.139514] env[61545]: DEBUG nova.compute.manager [req-28aa0455-594c-4160-844b-aadf37c32816 req-46748b29-19e3-4a43-b7d4-e047ce0b5179 service nova] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Detach interface failed, port_id=fa7b2d4a-22fa-4baa-8d17-381eb5d1ed5d, reason: Instance 5f82f8f7-be58-4a75-9420-2c321e480c26 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1340.151900] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.186975] env[61545]: INFO nova.scheduler.client.report [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocation for migration ddb5e038-2bfa-4a2d-9185-4e47cd743140 [ 1340.260952] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf8c9f8-5067-48e2-b20e-54e60d0c571f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.269279] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846f8093-a293-4705-8950-c87f6a2f53a3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.301676] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7488123-846e-4d8f-ac28-3a1466dbb73b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.310586] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9e5df0-5eac-4edb-b54f-db876b895a76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.327632] env[61545]: DEBUG nova.compute.provider_tree [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1340.525500] env[61545]: INFO nova.compute.manager [None req-c45f36ae-3bfd-4745-a501-3c117aad1572 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Get console output [ 1340.525934] env[61545]: WARNING nova.virt.vmwareapi.driver [None req-c45f36ae-3bfd-4745-a501-3c117aad1572 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] The console log is missing. Check your VSPC configuration [ 1340.550915] env[61545]: INFO nova.compute.manager [-] [instance: 5f82f8f7-be58-4a75-9420-2c321e480c26] Took 1.31 seconds to deallocate network for instance. 
[ 1340.637749] env[61545]: DEBUG oslo_vmware.api [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Task: {'id': task-4257015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198353} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.638140] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.638241] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1340.638403] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1340.638576] env[61545]: INFO nova.compute.manager [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1340.638815] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1340.639015] env[61545]: DEBUG nova.compute.manager [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1340.639113] env[61545]: DEBUG nova.network.neutron [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1340.695165] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2c484c69-92b3-43fa-8642-215845cfec10 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.910s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.830989] env[61545]: DEBUG nova.scheduler.client.report [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1341.057849] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.372413] env[61545]: DEBUG nova.network.neutron [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.609325] env[61545]: DEBUG nova.compute.manager [req-c6568a8a-631a-4779-b6e0-b2b736041882 req-38c7e4a7-163a-48bf-b8d8-ffc839b84454 service nova] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Received event network-vif-deleted-3a7d71d2-6873-48b1-8e3f-5b6ca398f5c9 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1341.843430] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.253s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.846658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.789s {{(pid=61545) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.846930] env[61545]: DEBUG nova.objects.instance [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid 5f82f8f7-be58-4a75-9420-2c321e480c26 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.874547] env[61545]: INFO nova.compute.manager [-] [instance: e24a6086-7dd1-4e75-b49e-dcc7c28eaea8] Took 1.24 seconds to deallocate network for instance. [ 1342.382926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.399437] env[61545]: INFO nova.scheduler.client.report [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocation for migration 48f4f990-0e12-40ad-a925-28b1254e1dc7 [ 1342.477739] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8223fb8b-ce1c-4eb5-8e50-c65693105f61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.486129] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7309d5d6-63cf-422b-af63-ebb802e5dff5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.519122] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63006f69-b502-427a-9c64-d7d630ae49ad {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.527502] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d696dd5-b7cb-49ab-818e-85df7fd9e8b7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.543053] env[61545]: DEBUG nova.compute.provider_tree [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.636059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "983082dd-274c-4441-b386-caf775336ef0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.636310] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.905545] env[61545]: DEBUG oslo_concurrency.lockutils [None req-ef0f46fa-d6b9-49d8-a981-2d2cbe304c8a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.338s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.046917] env[61545]: DEBUG nova.scheduler.client.report [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1343.138272] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.551658] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.554415] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.172s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.554680] env[61545]: DEBUG nova.objects.instance [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lazy-loading 'resources' on Instance uuid e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1343.580877] env[61545]: INFO nova.scheduler.client.report [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance 5f82f8f7-be58-4a75-9420-2c321e480c26 [ 1343.661728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.727628] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.727907] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.728191] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.728403] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.728579] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.731396] env[61545]: INFO nova.compute.manager [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Terminating instance [ 1344.090454] env[61545]: DEBUG oslo_concurrency.lockutils [None req-059da6ed-b172-4f9a-847a-57d888659458 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "5f82f8f7-be58-4a75-9420-2c321e480c26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.520s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.194643] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81f5c94-c9fc-4df2-b346-1de2ec13dc5f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.205799] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-366b37cb-ef01-4053-a51d-49bb24c3f478 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.245259] env[61545]: DEBUG nova.compute.manager [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1344.245538] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1344.251054] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a54773-09b3-4f3f-be9c-237a17f60765 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.253062] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f294ab-2d49-4b63-bf27-6d3d5d7eb40b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.259920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.260280] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.273681] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cacb9d-d4d4-4d0b-a22a-5ba7079209ea {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.278731] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.280169] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97ec9858-684d-47f1-a267-5f4d3eb4baf5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.295777] env[61545]: DEBUG nova.compute.provider_tree [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed in 
ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.300326] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1344.300326] env[61545]: value = "task-4257016" [ 1344.300326] env[61545]: _type = "Task" [ 1344.300326] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.314178] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.759206] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.759447] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.762609] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1344.798749] env[61545]: DEBUG nova.scheduler.client.report [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1344.812479] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257016, 'name': PowerOffVM_Task, 'duration_secs': 0.26473} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.812857] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1344.813069] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1344.813925] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1db3357c-a660-4d8c-af99-8d446318e06a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.883840] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1344.884134] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1344.884409] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] e7af11e5-7500-4fc7-8c68-651376dff297 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.884784] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14a16499-5763-4af5-8223-b7cc46a5367e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.892475] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1344.892475] env[61545]: value = "task-4257018" [ 1344.892475] env[61545]: _type = "Task" [ 1344.892475] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.903405] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257018, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.262440] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1345.286218] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.304668] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.307569] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.646s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.309826] env[61545]: INFO nova.compute.claims [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1345.323855] env[61545]: INFO nova.scheduler.client.report [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Deleted allocations for instance e24a6086-7dd1-4e75-b49e-dcc7c28eaea8 [ 1345.403356] env[61545]: DEBUG oslo_vmware.api [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15323} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.403625] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1345.403780] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1345.403963] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1345.404159] env[61545]: INFO nova.compute.manager [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1345.404405] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1345.404595] env[61545]: DEBUG nova.compute.manager [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1345.404681] env[61545]: DEBUG nova.network.neutron [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1345.660737] env[61545]: DEBUG nova.compute.manager [req-23fc3e31-2009-4593-9438-f27290b84308 req-018f3fa2-a9d2-45db-985f-39b32c4fadaa service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Received event network-vif-deleted-10ec8bc8-486b-4eea-8d00-5b81fe9f1380 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1345.660990] env[61545]: INFO nova.compute.manager [req-23fc3e31-2009-4593-9438-f27290b84308 req-018f3fa2-a9d2-45db-985f-39b32c4fadaa service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Neutron deleted interface 10ec8bc8-486b-4eea-8d00-5b81fe9f1380; detaching it from the instance and deleting it from the info cache [ 1345.661143] env[61545]: DEBUG nova.network.neutron [req-23fc3e31-2009-4593-9438-f27290b84308 req-018f3fa2-a9d2-45db-985f-39b32c4fadaa service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.784431] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.832672] env[61545]: DEBUG oslo_concurrency.lockutils [None req-27748f83-14f7-4b18-9e44-2011f461ba27 tempest-AttachInterfacesTestJSON-1792420423 tempest-AttachInterfacesTestJSON-1792420423-project-member] Lock "e24a6086-7dd1-4e75-b49e-dcc7c28eaea8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.827s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.138914] env[61545]: DEBUG nova.network.neutron [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.164549] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-182ec07a-5e24-4781-8e1f-8fefb307b1d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.175270] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fed151-a083-4530-99ab-55ee312126e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.202835] env[61545]: DEBUG nova.compute.manager [req-23fc3e31-2009-4593-9438-f27290b84308 req-018f3fa2-a9d2-45db-985f-39b32c4fadaa service nova] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Detach interface failed, port_id=10ec8bc8-486b-4eea-8d00-5b81fe9f1380, reason: Instance 
e7af11e5-7500-4fc7-8c68-651376dff297 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1346.440945] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037efc9a-fee5-45db-8dc0-27dc41a74156 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.449845] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cb97fb-3f4f-4345-9ff5-0d8af051bf8a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.482594] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4785cbaa-b16b-4583-aa8d-dcb34890ea93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.491682] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021b289c-ac73-4628-998e-b546cf67e539 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.508208] env[61545]: DEBUG nova.compute.provider_tree [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1346.642533] env[61545]: INFO nova.compute.manager [-] [instance: e7af11e5-7500-4fc7-8c68-651376dff297] Took 1.24 seconds to deallocate network for instance. [ 1347.031539] env[61545]: ERROR nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [req-9e91fd89-cb80-4ac0-aa71-6065daeca49e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9e91fd89-cb80-4ac0-aa71-6065daeca49e"}]} [ 1347.047911] env[61545]: DEBUG nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1347.062382] env[61545]: DEBUG nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1347.062611] env[61545]: DEBUG nova.compute.provider_tree [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1347.074320] env[61545]: DEBUG nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1347.093087] env[61545]: DEBUG nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1347.149578] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.209648] env[61545]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be718879-2db9-4bfb-922c-b9f42abf592f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.218141] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe0437f-df2a-4fe6-8d9d-8ecad09d42a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.250368] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8b8be1-436c-4d56-ab53-b3f677a15519 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.258836] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c91868-6b21-4a30-95fe-4c3dfd73ee23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.273071] env[61545]: DEBUG nova.compute.provider_tree [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1347.806465] env[61545]: DEBUG nova.scheduler.client.report [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1347.806841] env[61545]: DEBUG nova.compute.provider_tree [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 185 to 186 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1347.807065] env[61545]: DEBUG nova.compute.provider_tree [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1348.312094] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.005s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.312645] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1348.316064] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.030s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.317085] env[61545]: INFO nova.compute.claims [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1348.821224] env[61545]: DEBUG nova.compute.utils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1348.824441] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1349.326527] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1349.459961] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d14e1c6-23d9-4eb5-954a-d07df1387e2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.468087] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7aa3c35-cfd0-4582-a73c-762f85046ac8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.501303] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3226dada-2539-453d-afc8-a84fef4a4f18 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.509872] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a0d7dc-28e3-4f06-b5fa-d5e1f657bac9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.525349] env[61545]: DEBUG nova.compute.provider_tree [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.030113] env[61545]: DEBUG nova.scheduler.client.report [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1350.343701] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1350.374284] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1350.374284] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1350.374284] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1350.374454] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1350.374454] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1350.374623] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1350.374796] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1350.374957] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1350.375140] env[61545]: DEBUG nova.virt.hardware [None 
req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1350.375294] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1350.375465] env[61545]: DEBUG nova.virt.hardware [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1350.376387] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e7ac57-ee2a-496c-a993-b3be39a0cdf8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.385213] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad208d7-a352-49e1-8cba-0c055b91b544 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.399651] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.405561] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Creating folder: Project (f7a7195bb1294790a97476453795d283). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.405917] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-602c7e78-f4cf-463f-ae17-51fddb38d5db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.419985] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Created folder: Project (f7a7195bb1294790a97476453795d283) in parent group-v838542. [ 1350.420364] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Creating folder: Instances. Parent ref: group-v838872. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1350.420745] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f7d3837-8369-4142-94ad-c0e5462022b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.432112] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Created folder: Instances in parent group-v838872. [ 1350.432381] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1350.432582] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.432798] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b20973c-1c6c-46ba-aaf4-9843260d2ba2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.450693] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.450693] env[61545]: value = "task-4257021" [ 1350.450693] env[61545]: _type = "Task" [ 1350.450693] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.462383] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257021, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.535229] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.219s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.535825] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1350.538843] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.755s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.540372] env[61545]: INFO nova.compute.claims [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1350.961169] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257021, 'name': CreateVM_Task, 'duration_secs': 0.285498} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.961358] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.961801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.961992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.962349] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1350.962613] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6926031c-ca51-4d7d-a06f-6df17d741041 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.967707] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1350.967707] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a24b-2c64-329c-fb66-9de05dce69bc" [ 1350.967707] env[61545]: _type = "Task" [ 1350.967707] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.978207] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a24b-2c64-329c-fb66-9de05dce69bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.044851] env[61545]: DEBUG nova.compute.utils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1351.052209] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1351.052209] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1351.097913] env[61545]: DEBUG nova.policy [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4333754ae4a4e26bab98dfe1853e667', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b64f16b672ff471ba1d48aa2490b9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1351.420402] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Successfully created port: f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1351.480979] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d4a24b-2c64-329c-fb66-9de05dce69bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011122} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.480979] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.481265] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1351.481477] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.481660] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.481920] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1351.482512] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c73f2fa3-00d3-4536-896c-1e087df076dc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.491758] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1351.492691] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1351.492920] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-974c436a-88d2-4470-868e-643fa732e646 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.498749] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1351.498749] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed0435-9903-b66b-36eb-d83a95dd16f6" [ 1351.498749] env[61545]: _type = "Task" [ 1351.498749] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.508725] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed0435-9903-b66b-36eb-d83a95dd16f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.549854] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1351.705877] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaeddb4-6f4e-4220-817d-df8bf16f1399 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.715753] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc14a23-db61-4081-bc29-aba8a2ab97c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.747901] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ecc772-686c-4695-9ca9-5849033b4db5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.756343] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2d78c7-03c9-4492-a774-efc488c7ccf4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.770892] env[61545]: DEBUG nova.compute.provider_tree [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.012081] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52ed0435-9903-b66b-36eb-d83a95dd16f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010722} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.012969] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64cab321-0325-4904-9869-22f8ac34214a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.019056] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1352.019056] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529585a7-6a69-051b-73af-6807c06d71d5" [ 1352.019056] env[61545]: _type = "Task" [ 1352.019056] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.027910] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529585a7-6a69-051b-73af-6807c06d71d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.278352] env[61545]: DEBUG nova.scheduler.client.report [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1352.531185] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]529585a7-6a69-051b-73af-6807c06d71d5, 'name': SearchDatastore_Task, 'duration_secs': 0.01215} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.531473] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.531735] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1352.532086] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72f8f738-47ee-4060-aa69-ecd6d7423750 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.541097] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1352.541097] env[61545]: value = "task-4257022" [ 1352.541097] env[61545]: _type = "Task" [ 1352.541097] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.550333] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257022, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.563054] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1352.593033] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1352.593382] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.593545] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1352.593744] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.593948] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1352.594175] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1352.594452] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1352.594647] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1352.594849] 
env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1352.595069] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1352.595303] env[61545]: DEBUG nova.virt.hardware [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1352.596308] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94133559-d6d7-4124-a329-57f127d68622 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.605502] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ac653d-510f-401a-a25a-09837949ae73 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.786104] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.247s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.786808] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1352.789723] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.640s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.790076] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.816875] env[61545]: INFO nova.scheduler.client.report [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocations for instance e7af11e5-7500-4fc7-8c68-651376dff297 [ 1352.925161] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "da648e8c-aea3-4731-ad2b-719a15f29abf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.925497] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.058075] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257022, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.199474] env[61545]: DEBUG nova.compute.manager [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Received event network-vif-plugged-f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1353.199789] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.200095] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.200300] env[61545]: DEBUG oslo_concurrency.lockutils [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.200478] env[61545]: DEBUG nova.compute.manager [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] No waiting events found dispatching network-vif-plugged-f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1353.200648] env[61545]: WARNING nova.compute.manager [req-6e289e75-d9df-468a-9594-1384c73dd5db req-e77380b2-7238-4f7b-bcec-5fb624934f04 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Received unexpected event network-vif-plugged-f859288c-a2db-438a-8bdb-5c2669003b88 for instance with vm_state building and task_state spawning. [ 1353.285960] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Successfully updated port: f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.294833] env[61545]: DEBUG nova.compute.utils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1353.296302] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1353.296504] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1353.328069] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4ae1ed87-74e4-4be5-ad1e-5b5aca47811e tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "e7af11e5-7500-4fc7-8c68-651376dff297" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.600s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.357822] env[61545]: DEBUG nova.policy [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d0d78511dd5408cba4db4e57271b5c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b790c7b2af394de28f7f42ce0d230346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1353.428692] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1353.553346] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720875} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.553632] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1353.553851] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1353.554136] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4b3caae-1baa-4303-aa9c-591b18495b5b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.561262] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1353.561262] env[61545]: value = "task-4257023" [ 1353.561262] env[61545]: _type = "Task" [ 1353.561262] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.571484] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257023, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.682213] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Successfully created port: 2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1353.789688] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.789688] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1353.789688] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.799876] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1353.953203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.953568] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.955348] env[61545]: INFO nova.compute.claims [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1354.071922] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257023, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075999} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.072236] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1354.073049] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdfdfb7-cda4-43d2-894e-7c9ebf39cc97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.093518] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1354.093859] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0c6356c-ad56-4427-b806-e6b311f9645a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.114574] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1354.114574] env[61545]: value = "task-4257024" [ 1354.114574] env[61545]: _type = "Task" [ 1354.114574] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.123022] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257024, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.332798] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1354.504865] env[61545]: DEBUG nova.network.neutron [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.628041] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257024, 'name': ReconfigVM_Task, 'duration_secs': 0.281801} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.628224] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1354.628738] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5b8da7d-b05e-4035-8a46-c3ff87078e54 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.637261] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1354.637261] env[61545]: value = "task-4257025" [ 1354.637261] env[61545]: _type = "Task" [ 1354.637261] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.647792] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257025, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.809336] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1354.837814] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1354.838082] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1354.838247] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1354.838436] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1354.838581] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1354.838734] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1354.838937] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 
tempest-ServersTestJSON-512759938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1354.839117] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1354.839290] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1354.839466] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1354.839661] env[61545]: DEBUG nova.virt.hardware [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1354.840562] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ddee3d-54ef-4045-88c5-3b41abcdbca7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.849369] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c33455-5f1e-4496-9f70-64fbde5d9329 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.010167] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.010556] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Instance network_info: |[{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1355.011541] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ff:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f859288c-a2db-438a-8bdb-5c2669003b88', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1355.019036] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1355.019036] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1355.019269] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf573199-657e-4e05-a594-a7c00abcb511 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.043990] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1355.043990] env[61545]: value = "task-4257026" [ 1355.043990] env[61545]: _type = "Task" [ 1355.043990] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.054404] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257026, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.100199] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2107636-972a-4a45-abb6-0595dd46abd7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.108226] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a8e5ff-3493-4556-8e3c-f19e199c98e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.146629] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed0eae6-aba3-4690-8449-e8ac18e44096 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.157221] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6ed1cd-01e4-4665-ae4c-d60d8891a0da {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.161393] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257025, 'name': Rename_Task, 'duration_secs': 0.145248} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.161698] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1355.162422] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b59c798-9d7d-4926-b61d-837f30549438 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.173551] env[61545]: DEBUG nova.compute.provider_tree [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1355.176369] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1355.176369] env[61545]: value = "task-4257027" [ 1355.176369] env[61545]: _type = "Task" [ 1355.176369] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.187078] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257027, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.238598] env[61545]: DEBUG nova.compute.manager [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Received event network-changed-f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1355.238778] env[61545]: DEBUG nova.compute.manager [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Refreshing instance network info cache due to event network-changed-f859288c-a2db-438a-8bdb-5c2669003b88. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1355.238997] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Acquiring lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.239157] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Acquired lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.239319] env[61545]: DEBUG nova.network.neutron [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Refreshing network info cache for port f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1355.278759] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Successfully updated port: 2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1355.555426] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257026, 'name': CreateVM_Task, 'duration_secs': 0.406677} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.555799] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1355.556357] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.556526] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.556870] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1355.557171] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-951ad7f2-8ce5-453f-949f-d69b1638c369 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.563633] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1355.563633] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233275b-da24-d565-8c16-579948e45763" [ 1355.563633] env[61545]: _type = "Task" [ 1355.563633] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.573123] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233275b-da24-d565-8c16-579948e45763, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.678539] env[61545]: DEBUG nova.scheduler.client.report [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1355.693232] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257027, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.782037] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.782217] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.782389] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1356.074289] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5233275b-da24-d565-8c16-579948e45763, 'name': SearchDatastore_Task, 'duration_secs': 0.015234} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.074632] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.074867] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1356.075125] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.075275] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.075455] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1356.075717] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-357eaaf0-fd3a-4d88-862a-d31204c30409 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.085106] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1356.085302] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1356.086047] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52820ad1-d363-472d-9fc7-a0e766f3d311 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.092381] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1356.092381] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52464710-c0aa-0a73-95d5-d9575e241ba6" [ 1356.092381] env[61545]: _type = "Task" [ 1356.092381] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.102017] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52464710-c0aa-0a73-95d5-d9575e241ba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.183232] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.183827] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1356.197226] env[61545]: DEBUG oslo_vmware.api [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257027, 'name': PowerOnVM_Task, 'duration_secs': 0.527562} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.197496] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.197694] env[61545]: INFO nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Took 5.85 seconds to spawn the instance on the hypervisor. 
[ 1356.197868] env[61545]: DEBUG nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.198739] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eeab9b8-b8c1-4396-8ae4-c2ba39c5b398 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.223314] env[61545]: DEBUG nova.network.neutron [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updated VIF entry in instance network info cache for port f859288c-a2db-438a-8bdb-5c2669003b88. {{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1356.223669] env[61545]: DEBUG nova.network.neutron [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.315892] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1356.459037] env[61545]: DEBUG nova.network.neutron [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Updating instance_info_cache with network_info: [{"id": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "address": "fa:16:3e:4a:45:10", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff38b08-df", "ovs_interfaceid": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.604288] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52464710-c0aa-0a73-95d5-d9575e241ba6, 'name': SearchDatastore_Task, 'duration_secs': 0.010825} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.605189] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66a4bca7-d852-4fc3-a32a-b8e2f2c31828 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.612186] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1356.612186] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e21b02-3406-908d-d580-a43416a00d07" [ 1356.612186] env[61545]: _type = "Task" [ 1356.612186] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.622654] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e21b02-3406-908d-d580-a43416a00d07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.693557] env[61545]: DEBUG nova.compute.utils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1356.694945] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Allocating IP information in the background. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1356.695139] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1356.716952] env[61545]: INFO nova.compute.manager [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Took 13.07 seconds to build instance. [ 1356.726586] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Releasing lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.726854] env[61545]: DEBUG nova.compute.manager [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Received event network-vif-plugged-2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1356.727061] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Acquiring lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.727286] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.727449] env[61545]: DEBUG oslo_concurrency.lockutils [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.727618] env[61545]: DEBUG nova.compute.manager [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] No waiting events 
found dispatching network-vif-plugged-2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1356.727788] env[61545]: WARNING nova.compute.manager [req-2905ec0f-6893-4f7d-865b-acbbcc195455 req-4851890c-7ca3-4ac1-8c22-1d65c51912c4 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Received unexpected event network-vif-plugged-2ff38b08-dfa0-4c6f-946d-b496a5074f97 for instance with vm_state building and task_state spawning. [ 1356.784136] env[61545]: DEBUG nova.policy [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c334cb5c2c4b47d5b25bf89dc737f68f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16df87c509e74681a6a9f578bcaf44db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1356.961423] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.961760] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Instance network_info: |[{"id": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "address": "fa:16:3e:4a:45:10", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff38b08-df", "ovs_interfaceid": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1356.962452] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:45:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a071ecf4-e713-4f97-9271-8c17952f6dee', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ff38b08-dfa0-4c6f-946d-b496a5074f97', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1356.970697] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1356.971334] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1356.971739] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3738666-cfe4-40ec-a68c-c2528cd241f8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.993286] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1356.993286] env[61545]: value = "task-4257028" [ 1356.993286] env[61545]: _type = "Task" [ 1356.993286] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.002710] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257028, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.125302] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e21b02-3406-908d-d580-a43416a00d07, 'name': SearchDatastore_Task, 'duration_secs': 0.011111} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.125668] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1357.125939] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1357.126238] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d79ff3f2-03be-40ed-a327-677abbee4242 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.133490] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1357.133490] env[61545]: value = "task-4257029" [ 1357.133490] env[61545]: _type = "Task" [ 1357.133490] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.143442] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257029, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.177735] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Successfully created port: 62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1357.198601] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1357.219262] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b19f5043-6f8b-4d8c-a2b4-b03cd3ce87f2 tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.583s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.272371] env[61545]: DEBUG nova.compute.manager [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Received event network-changed-2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1357.273338] env[61545]: DEBUG nova.compute.manager [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Refreshing instance network info cache due to event network-changed-2ff38b08-dfa0-4c6f-946d-b496a5074f97. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1357.273692] env[61545]: DEBUG oslo_concurrency.lockutils [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] Acquiring lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.273967] env[61545]: DEBUG oslo_concurrency.lockutils [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] Acquired lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.274343] env[61545]: DEBUG nova.network.neutron [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Refreshing network info cache for port 2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1357.422064] env[61545]: INFO nova.compute.manager [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Rebuilding instance [ 1357.474748] env[61545]: DEBUG nova.compute.manager [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1357.475697] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46ee5e1-2e3e-44fd-a692-b3dffad88276 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.505778] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257028, 'name': CreateVM_Task, 'duration_secs': 0.381159} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.505972] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1357.506701] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.507363] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.507718] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1357.508008] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dbb27cc-5d3b-4c09-8285-fdad607cd29b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.514014] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1357.514014] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c894bd-7e98-c0f4-f08b-9725d2c576bb" [ 1357.514014] env[61545]: _type = "Task" [ 1357.514014] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.524939] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c894bd-7e98-c0f4-f08b-9725d2c576bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.645446] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50097} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.645792] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.645868] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.646124] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9dd235e-aa4d-4bf7-8693-42e54f1ff5df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.653120] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1357.653120] env[61545]: value = "task-4257030" [ 1357.653120] env[61545]: _type = "Task" [ 1357.653120] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.663156] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.016099] env[61545]: DEBUG nova.network.neutron [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Updated VIF entry in instance network info cache for port 2ff38b08-dfa0-4c6f-946d-b496a5074f97. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1358.016529] env[61545]: DEBUG nova.network.neutron [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Updating instance_info_cache with network_info: [{"id": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "address": "fa:16:3e:4a:45:10", "network": {"id": "2ef733be-b4b2-45ee-b1f8-868b739a23bb", "bridge": "br-int", "label": "tempest-ServersTestJSON-124612760-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b790c7b2af394de28f7f42ce0d230346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a071ecf4-e713-4f97-9271-8c17952f6dee", "external-id": "nsx-vlan-transportzone-23", "segmentation_id": 23, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff38b08-df", "ovs_interfaceid": "2ff38b08-dfa0-4c6f-946d-b496a5074f97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.028467] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c894bd-7e98-c0f4-f08b-9725d2c576bb, 'name': SearchDatastore_Task, 'duration_secs': 0.057396} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.029293] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.029526] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1358.029759] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.029971] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1358.030193] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1358.030699] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcf06d72-1fa3-43ae-8881-6bc7bf7f7ecc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.040734] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1358.040888] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1358.041645] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d07c9e9-50ae-4fe5-9948-a9f30005cf13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.049234] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1358.049234] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520f0bf5-786f-935e-cb51-10563eaa5e7e" [ 1358.049234] env[61545]: _type = "Task" [ 1358.049234] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.058732] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520f0bf5-786f-935e-cb51-10563eaa5e7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.163612] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074007} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.163894] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.164713] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30417822-fd69-43b6-9a50-2d86ece6b790 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.187242] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.187551] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf6e3327-33ac-4327-b53c-494930f73964 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.209273] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1358.213428] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1358.213428] env[61545]: value = "task-4257031" [ 1358.213428] env[61545]: _type = "Task" [ 1358.213428] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.222775] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257031, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.242370] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1358.242645] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1358.242789] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1358.242979] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1358.243148] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1358.243332] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1358.243508] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1358.243672] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1358.243838] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1358.244011] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1358.244204] env[61545]: DEBUG nova.virt.hardware [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1358.245156] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615b6b10-cfc9-40d4-b89d-38af3e10db92 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.254408] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712c56ba-2867-435a-8936-7daf9d779cdf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.491252] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1358.491252] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec1810a3-98c1-4d58-b40e-bbf5f4b160f2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.500126] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1358.500126] env[61545]: value = "task-4257032" [ 1358.500126] env[61545]: _type = "Task" [ 1358.500126] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.509973] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.523036] env[61545]: DEBUG oslo_concurrency.lockutils [req-a474fd7e-e4d7-4a36-b108-07afdbbef7a4 req-28c447d0-e593-4357-80e9-6f3757a0f194 service nova] Releasing lock "refresh_cache-ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.559826] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520f0bf5-786f-935e-cb51-10563eaa5e7e, 'name': SearchDatastore_Task, 'duration_secs': 0.033783} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.560721] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6264db64-6b68-45b2-a114-64593a034c06 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.566405] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1358.566405] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f08429-009f-fbec-4c66-35d684a66e34" [ 1358.566405] env[61545]: _type = "Task" [ 1358.566405] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.574458] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f08429-009f-fbec-4c66-35d684a66e34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.723812] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257031, 'name': ReconfigVM_Task, 'duration_secs': 0.323917} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.724157] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfigured VM instance instance-0000007b to attach disk [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.724798] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a8b3ff-a93f-4a40-8531-f130671403c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.732042] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1358.732042] env[61545]: value = "task-4257033" [ 1358.732042] env[61545]: _type = "Task" [ 1358.732042] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.741087] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257033, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.782647] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Successfully updated port: 62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1359.012344] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257032, 'name': PowerOffVM_Task, 'duration_secs': 0.18227} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.012344] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.013253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.014080] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11bb5db-38d1-4225-92f4-40a44daf6722 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.024035] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.024035] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-468b60e6-8220-448e-9d48-296c42003c0a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.055510] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.055510] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.055510] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Deleting the datastore file [datastore2] 983082dd-274c-4441-b386-caf775336ef0 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.055510] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ae78ee9-bbc9-4db7-baee-1141db203df5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.063836] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1359.063836] env[61545]: value = "task-4257035" [ 1359.063836] env[61545]: _type = "Task" [ 1359.063836] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.076227] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.080310] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f08429-009f-fbec-4c66-35d684a66e34, 'name': SearchDatastore_Task, 'duration_secs': 0.017311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.080841] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.081178] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8/ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1359.081498] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9b1476f-9abe-4b2d-b877-3dff11016975 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.089824] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1359.089824] env[61545]: value = "task-4257036" [ 1359.089824] env[61545]: _type = "Task" [ 1359.089824] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.099787] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257036, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.243881] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257033, 'name': Rename_Task} progress is 99%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.287251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.287251] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquired lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.287251] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.302162] env[61545]: DEBUG nova.compute.manager [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Received event network-vif-plugged-62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1359.302463] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Acquiring lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.302583] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.302748] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.302938] env[61545]: DEBUG nova.compute.manager [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] No waiting events found dispatching network-vif-plugged-62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1359.303451] env[61545]: WARNING nova.compute.manager [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Received unexpected event network-vif-plugged-62ea8d65-9aae-4400-998b-1a364d16dfcb for instance with vm_state building and task_state spawning. 
[ 1359.303645] env[61545]: DEBUG nova.compute.manager [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Received event network-changed-62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1359.303914] env[61545]: DEBUG nova.compute.manager [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Refreshing instance network info cache due to event network-changed-62ea8d65-9aae-4400-998b-1a364d16dfcb. {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1359.304114] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Acquiring lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.575193] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211741} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.575501] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.575693] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1359.575873] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1359.602353] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257036, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.743162] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257033, 'name': Rename_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.821620] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1359.993593] env[61545]: DEBUG nova.network.neutron [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updating instance_info_cache with network_info: [{"id": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "address": "fa:16:3e:43:cb:9a", "network": {"id": "c97507b9-3c48-43a0-8e7d-057db107550d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1175101548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16df87c509e74681a6a9f578bcaf44db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ea8d65-9a", "ovs_interfaceid": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.102471] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641128} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.102807] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8/ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1360.103138] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1360.103442] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b423c2d7-57b4-4d8e-a746-722a0714f852 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.112103] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1360.112103] env[61545]: value = "task-4257037" [ 1360.112103] env[61545]: _type = "Task" [ 1360.112103] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.122550] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257037, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.245377] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257033, 'name': Rename_Task, 'duration_secs': 1.14351} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.245651] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1360.245902] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-262a4105-0e31-4d19-9f26-7357153e9479 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.253688] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1360.253688] env[61545]: value = "task-4257038" [ 1360.253688] env[61545]: _type = "Task" [ 1360.253688] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.262439] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257038, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.497242] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Releasing lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.497629] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Instance network_info: |[{"id": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "address": "fa:16:3e:43:cb:9a", "network": {"id": "c97507b9-3c48-43a0-8e7d-057db107550d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1175101548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16df87c509e74681a6a9f578bcaf44db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ea8d65-9a", "ovs_interfaceid": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1360.498161] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Acquired lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.499061] env[61545]: DEBUG nova.network.neutron [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Refreshing network info cache for port 62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1360.499688] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:cb:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62ea8d65-9aae-4400-998b-1a364d16dfcb', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1360.508088] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 
tempest-ServersTestManualDisk-1260630413-project-member] Creating folder: Project (16df87c509e74681a6a9f578bcaf44db). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.509322] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5c98af8-fea2-48dd-ae2f-a0c9dd1dcea3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.524558] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Created folder: Project (16df87c509e74681a6a9f578bcaf44db) in parent group-v838542. [ 1360.524768] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Creating folder: Instances. Parent ref: group-v838877. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.524998] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3c3a213-11e2-4791-aeec-2a27a9d3de2f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.534322] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Created folder: Instances in parent group-v838877. [ 1360.535097] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1360.535377] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1360.535616] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f33ff7b0-16c4-423a-982d-b02447307c6d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.556799] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1360.556799] env[61545]: value = "task-4257041" [ 1360.556799] env[61545]: _type = "Task" [ 1360.556799] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.565490] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257041, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.621157] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1360.621630] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.621937] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1360.622288] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.622579] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1360.622967] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1360.623277] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1360.623598] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1360.623881] env[61545]: DEBUG 
nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1360.624123] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1360.624347] env[61545]: DEBUG nova.virt.hardware [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1360.625282] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5b0091-8294-404a-909b-5b5e8896dfa4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.637250] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8b0b94-6049-4c68-b68d-0e5c4f8b056a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.641308] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068019} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.641592] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1360.642867] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc90546-b327-4ad0-8997-06131ca075f4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.654414] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1360.660778] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1360.661649] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1360.662043] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b1cba4d-dc8f-45b2-ad7f-5678eeb6f501 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.696957] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8/ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.697852] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a71d0442-d691-492e-9f48-9ace6bbbd0db {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.714961] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1360.714961] env[61545]: value = "task-4257042" [ 1360.714961] env[61545]: _type = "Task" [ 1360.714961] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.725906] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257042, 'name': CreateVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.727950] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1360.727950] env[61545]: value = "task-4257043" [ 1360.727950] env[61545]: _type = "Task" [ 1360.727950] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.738294] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257043, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.766308] env[61545]: DEBUG oslo_vmware.api [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257038, 'name': PowerOnVM_Task, 'duration_secs': 0.481572} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.766694] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.766773] env[61545]: INFO nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Took 8.20 seconds to spawn the instance on the hypervisor. [ 1360.766983] env[61545]: DEBUG nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1360.767864] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc420f27-feec-419c-bd26-42ed68f441e1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.070603] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257041, 'name': CreateVM_Task, 'duration_secs': 0.388419} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.070871] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.071471] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.071642] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.072015] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1361.072292] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bfb3486-8d90-4e7b-8f21-f57adc91d7df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.078340] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 
tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1361.078340] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5254b026-910e-c997-88c9-7b95b7e123d2" [ 1361.078340] env[61545]: _type = "Task" [ 1361.078340] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.087739] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5254b026-910e-c997-88c9-7b95b7e123d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.228138] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257042, 'name': CreateVM_Task, 'duration_secs': 0.346329} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.228428] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.231696] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.237572] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257043, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.254177] env[61545]: DEBUG nova.network.neutron [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updated VIF entry in instance network info cache for port 62ea8d65-9aae-4400-998b-1a364d16dfcb. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1361.254531] env[61545]: DEBUG nova.network.neutron [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updating instance_info_cache with network_info: [{"id": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "address": "fa:16:3e:43:cb:9a", "network": {"id": "c97507b9-3c48-43a0-8e7d-057db107550d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1175101548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16df87c509e74681a6a9f578bcaf44db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ea8d65-9a", "ovs_interfaceid": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.286048] env[61545]: INFO nova.compute.manager [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Took 16.02 seconds to build instance. [ 1361.589036] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5254b026-910e-c997-88c9-7b95b7e123d2, 'name': SearchDatastore_Task, 'duration_secs': 0.01074} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.589376] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.589612] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1361.589848] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.589996] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.590196] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.590485] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.590805] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1361.591059] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92863dc3-cf95-4870-a16d-2396156652ba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.593011] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5c63aaf-9681-4a4d-8005-d64d4f49e5a8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.599529] env[61545]: DEBUG oslo_vmware.api [None 
req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1361.599529] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52062625-9a5e-758e-6209-47914626e499" [ 1361.599529] env[61545]: _type = "Task" [ 1361.599529] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.604319] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.604549] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1361.605676] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce0fabce-92e3-42e0-b75a-cdb753be04ae {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.611321] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52062625-9a5e-758e-6209-47914626e499, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.614657] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1361.614657] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5201b112-164a-99fd-03ae-832cd96abfd1" [ 1361.614657] env[61545]: _type = "Task" [ 1361.614657] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.622915] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5201b112-164a-99fd-03ae-832cd96abfd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.739019] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257043, 'name': ReconfigVM_Task, 'duration_secs': 0.652217} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.739147] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Reconfigured VM instance instance-0000007c to attach disk [datastore2] ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8/ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.739811] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9411082-2473-49d4-a93e-5359083905b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.746302] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1361.746302] env[61545]: value = "task-4257044" [ 1361.746302] env[61545]: _type = "Task" [ 1361.746302] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.754884] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257044, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.757527] env[61545]: DEBUG oslo_concurrency.lockutils [req-5898a699-e673-4073-99a6-ba438332980f req-d97fcf8e-9e32-4ea8-8dc4-0fa3b5cb306f service nova] Releasing lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.788429] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e6e2ab04-35af-43cb-bc3f-b663dda91e97 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.528s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.111034] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52062625-9a5e-758e-6209-47914626e499, 'name': SearchDatastore_Task, 'duration_secs': 0.013334} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.111362] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.111593] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1362.111806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.125406] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]5201b112-164a-99fd-03ae-832cd96abfd1, 'name': SearchDatastore_Task, 'duration_secs': 0.009152} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.126302] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58128fb1-bf58-4813-9e00-c6d676a99c39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.133264] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1362.133264] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52456f9b-b5f1-33e9-9c07-2767eed04a2c" [ 1362.133264] env[61545]: _type = "Task" [ 1362.133264] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.142392] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52456f9b-b5f1-33e9-9c07-2767eed04a2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.256698] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257044, 'name': Rename_Task, 'duration_secs': 0.212882} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.256986] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1362.257261] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62246d06-0289-4d3e-b128-56a195e50b39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.264214] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1362.264214] env[61545]: value = "task-4257045" [ 1362.264214] env[61545]: _type = "Task" [ 1362.264214] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.272426] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.645999] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52456f9b-b5f1-33e9-9c07-2767eed04a2c, 'name': SearchDatastore_Task, 'duration_secs': 0.011384} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.646213] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.646469] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] da648e8c-aea3-4731-ad2b-719a15f29abf/da648e8c-aea3-4731-ad2b-719a15f29abf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1362.646752] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1362.646937] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1362.647172] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0bdda42-7a98-4f2b-835b-ae8a4e58766c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.649116] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8d3d51a-42fd-4ca5-9179-2942a19b2d8d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.657448] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1362.657448] env[61545]: value = "task-4257046" [ 1362.657448] env[61545]: _type = "Task" [ 1362.657448] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.662592] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1362.662763] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1362.663907] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-560b3748-4e2c-4863-a0e2-c5b4e7cdc964 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.669566] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.673101] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1362.673101] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ec250-d987-d408-5b35-109400a043e3" [ 1362.673101] env[61545]: _type = "Task" [ 1362.673101] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.681891] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ec250-d987-d408-5b35-109400a043e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.775334] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257045, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.816469] env[61545]: DEBUG nova.compute.manager [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Stashing vm_state: active {{(pid=61545) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1363.169500] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257046, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.183754] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]520ec250-d987-d408-5b35-109400a043e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010084} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.184688] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06902227-91d4-4963-94a4-ffa81e71f13c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.191287] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1363.191287] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00c5-2689-9eb7-b1de-b5e19d67c3a9" [ 1363.191287] env[61545]: _type = "Task" [ 1363.191287] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.200683] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00c5-2689-9eb7-b1de-b5e19d67c3a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.276058] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257045, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.339142] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.339503] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.668169] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598549} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.668437] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] da648e8c-aea3-4731-ad2b-719a15f29abf/da648e8c-aea3-4731-ad2b-719a15f29abf.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1363.668656] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1363.668915] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90bb3dac-ca1e-464c-8665-d1d7f2d87b05 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.675819] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1363.675819] env[61545]: value = "task-4257047" [ 1363.675819] env[61545]: _type = "Task" [ 1363.675819] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.685344] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257047, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.701268] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528c00c5-2689-9eb7-b1de-b5e19d67c3a9, 'name': SearchDatastore_Task, 'duration_secs': 0.049321} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.701487] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1363.701682] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1363.701971] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbc3b51d-8f35-4182-8ba9-d149b8a858f5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.708994] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1363.708994] env[61545]: value = "task-4257048" [ 1363.708994] env[61545]: _type = "Task" [ 1363.708994] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.718452] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.776089] env[61545]: DEBUG oslo_vmware.api [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257045, 'name': PowerOnVM_Task, 'duration_secs': 1.135626} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.776378] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1363.776587] env[61545]: INFO nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Took 8.97 seconds to spawn the instance on the hypervisor. 
[ 1363.776767] env[61545]: DEBUG nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.777571] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5451653f-2329-436c-894f-4ed01e3ab4d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.845486] env[61545]: INFO nova.compute.claims [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.187749] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075759} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.189054] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1364.189325] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e817eaa-659d-4585-81a0-c933bc28be58 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.215455] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] da648e8c-aea3-4731-ad2b-719a15f29abf/da648e8c-aea3-4731-ad2b-719a15f29abf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.215877] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ca2c1a-da31-4a05-ae39-85471d772b11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.240930] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1364.240930] env[61545]: value = "task-4257049" [ 1364.240930] env[61545]: _type = "Task" [ 1364.240930] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.244065] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257048, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.253094] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.297921] env[61545]: INFO nova.compute.manager [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Took 18.53 seconds to build instance. [ 1364.352203] env[61545]: INFO nova.compute.resource_tracker [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating resource usage from migration 4eb61987-1e41-4be7-b8c5-49f76c121b98 [ 1364.487386] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4c3072-b344-4780-bc0c-ba149fb107d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.495979] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1127a2a0-e6db-4705-a6c1-803e9847babb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.529884] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b1dd8b-7286-4b6c-9cf0-0ba59b1d126b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.538334] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15176ca1-99f9-4576-92bd-3213bc5e7b1e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.552403] env[61545]: DEBUG nova.compute.provider_tree [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1364.726065] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257048, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563093} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.726427] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1364.726592] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1364.726858] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b04e92de-9c8d-4bea-8b3a-185287b5a3be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.734082] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1364.734082] env[61545]: value = "task-4257050" [ 1364.734082] env[61545]: _type = "Task" [ 1364.734082] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.743109] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.753879] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257049, 'name': ReconfigVM_Task, 'duration_secs': 0.311727} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.754121] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Reconfigured VM instance instance-0000007d to attach disk [datastore2] da648e8c-aea3-4731-ad2b-719a15f29abf/da648e8c-aea3-4731-ad2b-719a15f29abf.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.754722] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4591950-e1c1-4678-8a40-6f4c446356f7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.761731] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1364.761731] env[61545]: value = "task-4257051" [ 1364.761731] env[61545]: _type = "Task" [ 1364.761731] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.772286] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257051, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.799816] env[61545]: DEBUG oslo_concurrency.lockutils [None req-32e2156a-ea67-4bd6-bbb1-0cd34f113a51 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.040s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.067364] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.067803] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.067803] env[61545]: DEBUG nova.compute.manager [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1365.068800] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c670842-d3f2-4673-8825-632361414d0d {{(pid=61545) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.072514] env[61545]: ERROR nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [req-d6adf220-9686-4394-bfb5-c29b9cd7376f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7015027d-c4e1-4938-ac31-6e4672774d7e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d6adf220-9686-4394-bfb5-c29b9cd7376f"}]} [ 1365.079298] env[61545]: DEBUG nova.compute.manager [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61545) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1365.079959] env[61545]: DEBUG nova.objects.instance [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'flavor' on Instance uuid ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.090065] env[61545]: DEBUG nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Refreshing inventories for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1365.105063] env[61545]: DEBUG nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating ProviderTree inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1365.105370] env[61545]: DEBUG nova.compute.provider_tree [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1365.117036] env[61545]: DEBUG nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Refreshing aggregate associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, aggregates: None {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1365.136013] env[61545]: DEBUG nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Refreshing trait associations for resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61545) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1365.246139] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101313} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.246382] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1365.247180] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214577b8-c257-4a41-8684-764cbb10a78e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.251099] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9217d59-d1f9-43d9-920a-af58a21ce83e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.266511] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2085436-cdc1-4416-9c48-2db0d3a4adaa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.278085] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1365.281121] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31e4c4a9-983f-4682-924e-1e87e5358326 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
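The 409 `placement.concurrent_update` error above, followed by the inventory refresh and the later generation bump from 189 to 190, reflects Placement's optimistic-concurrency model: each inventory write carries the resource provider generation the caller last saw, a stale generation is rejected with a conflict, and the client re-reads the provider and retries. Below is a minimal, self-contained sketch of that pattern; `PlacementStore`, `ConflictError`, and `set_inventory_with_retry` are hypothetical stand-ins for illustration, not the actual Nova scheduler report client.

```python
# Illustrative sketch of Placement's generation-based optimistic concurrency,
# loosely modelled on the 409 "placement.concurrent_update" -> refresh -> retry
# flow visible in the log records above. All names here are hypothetical.

class ConflictError(Exception):
    """Raised when the caller's provider generation is stale (HTTP 409)."""


class PlacementStore:
    """Minimal stand-in for one resource provider's inventory record."""

    def __init__(self):
        self.generation = 189
        self.inventory = {}

    def get(self):
        # A real client would GET /resource_providers/{uuid}/inventories.
        return self.generation, dict(self.inventory)

    def put(self, expected_generation, inventory):
        # Placement rejects writes whose generation is out of date.
        if expected_generation != self.generation:
            raise ConflictError("resource provider generation conflict")
        self.inventory = dict(inventory)
        self.generation += 1  # a successful write bumps the generation
        return self.generation


def set_inventory_with_retry(store, inventory, max_attempts=3):
    """Re-read the provider generation and retry after a conflict."""
    for attempt in range(1, max_attempts + 1):
        generation, _current = store.get()
        try:
            return store.put(generation, inventory)
        except ConflictError:
            if attempt == max_attempts:
                raise
            # Another writer won the race; refresh and try again.


if __name__ == "__main__":
    store = PlacementStore()
    # Simulate a concurrent writer bumping the generation under our feet.
    store.put(store.generation, {"VCPU": {"total": 48}})
    new_gen = set_inventory_with_retry(
        store, {"VCPU": {"total": 48}, "DISK_GB": {"total": 450}})
    print("inventory stored at generation", new_gen)
```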
[ 1365.301369] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257051, 'name': Rename_Task, 'duration_secs': 0.156762} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.325236] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1365.325629] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1365.325629] env[61545]: value = "task-4257052" [ 1365.325629] env[61545]: _type = "Task" [ 1365.325629] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.326046] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28e19987-1a5d-4a37-94cc-2ebf97ef50c0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.328406] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac0d6e4-9820-469d-8c15-662711a64ac4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.342226] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159a62ed-57c4-427c-bf9d-8c34cc0176ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.346118] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1365.346118] env[61545]: value = "task-4257053" [ 1365.346118] env[61545]: _type = "Task" [ 1365.346118] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.357933] env[61545]: DEBUG nova.compute.provider_tree [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1365.365892] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257053, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.838442] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257052, 'name': ReconfigVM_Task, 'duration_secs': 0.314902} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.838872] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 983082dd-274c-4441-b386-caf775336ef0/983082dd-274c-4441-b386-caf775336ef0.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1365.839392] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4ff2a06-4bdb-4f18-8a81-77c4b0432f16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.846950] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1365.846950] env[61545]: value = "task-4257054" [ 1365.846950] env[61545]: _type = "Task" [ 1365.846950] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.860087] env[61545]: DEBUG oslo_vmware.api [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257053, 'name': PowerOnVM_Task, 'duration_secs': 0.486616} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.865705] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1365.865940] env[61545]: INFO nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1365.866107] env[61545]: DEBUG nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1365.866393] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257054, 'name': Rename_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.867356] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade04189-558a-4e21-a5da-900904967ecf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.894150] env[61545]: DEBUG nova.scheduler.client.report [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 189 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1365.895033] env[61545]: DEBUG nova.compute.provider_tree [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 189 to 190 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1365.895033] env[61545]: DEBUG nova.compute.provider_tree [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1366.087199] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1366.087774] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-033cd432-046a-461e-8fd2-36112028a433 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.095704] env[61545]: DEBUG oslo_vmware.api [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1366.095704] env[61545]: value = "task-4257055" [ 1366.095704] env[61545]: _type = "Task" [ 1366.095704] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.105480] env[61545]: DEBUG oslo_vmware.api [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.360319] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257054, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.384053] env[61545]: INFO nova.compute.manager [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Took 12.45 seconds to build instance. [ 1366.399794] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.060s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.400073] env[61545]: INFO nova.compute.manager [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Migrating [ 1366.584682] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.584992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.585216] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.585418] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.585609] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.588218] env[61545]: INFO nova.compute.manager [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Terminating instance [ 1366.606204] env[61545]: DEBUG oslo_vmware.api [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257055, 'name': PowerOffVM_Task, 'duration_secs': 0.226934} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.606638] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1366.606772] env[61545]: DEBUG nova.compute.manager [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1366.607746] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de80c9dd-665e-4e9f-971e-995065ebf0cd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.860981] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257054, 'name': Rename_Task, 'duration_secs': 0.849318} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.861286] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1366.861535] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c28f2f1-860f-4ca4-a82a-10fd03af871a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.869127] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1366.869127] env[61545]: value = "task-4257056" [ 1366.869127] env[61545]: _type = "Task" [ 1366.869127] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.878596] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.886369] env[61545]: DEBUG oslo_concurrency.lockutils [None req-3fca7915-bf4a-452d-aa00-28b98d239351 tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.961s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.914838] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.915183] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1366.915447] env[61545]: DEBUG nova.network.neutron [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.092629] env[61545]: DEBUG nova.compute.manager [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1367.092912] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.093198] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8fc9929b-9ea7-4d66-804e-6f5920efcbd6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.103347] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1367.103347] env[61545]: value = "task-4257057" [ 1367.103347] env[61545]: _type = "Task" [ 1367.103347] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.112524] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.120545] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7a23f129-1cad-43e7-a80f-5364cf695d24 tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.381012] env[61545]: DEBUG oslo_vmware.api [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257056, 'name': PowerOnVM_Task, 'duration_secs': 0.505168} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.381303] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1367.381509] env[61545]: DEBUG nova.compute.manager [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1367.382586] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37084eb-cea7-4a73-811c-38af4ca1c9e4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.599553] env[61545]: DEBUG nova.compute.manager [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Received event network-changed-62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1367.599803] env[61545]: DEBUG nova.compute.manager [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Refreshing instance network info cache due to event network-changed-62ea8d65-9aae-4400-998b-1a364d16dfcb. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1367.600130] env[61545]: DEBUG oslo_concurrency.lockutils [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] Acquiring lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.600297] env[61545]: DEBUG oslo_concurrency.lockutils [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] Acquired lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.600461] env[61545]: DEBUG nova.network.neutron [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Refreshing network info cache for port 62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1367.613965] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257057, 'name': PowerOffVM_Task, 'duration_secs': 0.249104} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.615719] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.616019] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Volume detach. 
Driver type: vmdk {{(pid=61545) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1367.616361] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838860', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'name': 'volume-f670e2ee-2d66-439c-be73-79914f3d6fd5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '4e2eb270-abf0-4734-a49f-ac0b7ee141c8', 'attached_at': '2025-06-03T12:58:04.000000', 'detached_at': '', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'serial': 'f670e2ee-2d66-439c-be73-79914f3d6fd5'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1367.617595] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111a6e74-acd4-4473-8c69-96696522d553 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.640948] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721db4d3-40bb-4ce1-b54e-21ca81365733 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.648564] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dea4db-2dd0-4a5e-ba24-8378c007b53b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.667558] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e90f488-9432-4c33-ab12-7deb288e65d6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.686331] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] The volume has not been displaced from its original location: [datastore2] volume-f670e2ee-2d66-439c-be73-79914f3d6fd5/volume-f670e2ee-2d66-439c-be73-79914f3d6fd5.vmdk. No consolidation needed. 
{{(pid=61545) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1367.691646] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfiguring VM instance instance-00000076 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1367.694428] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66325e8d-4ec7-4b36-989e-153460bebaaf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.714379] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1367.714379] env[61545]: value = "task-4257058" [ 1367.714379] env[61545]: _type = "Task" [ 1367.714379] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.727144] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257058, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.818018] env[61545]: DEBUG nova.network.neutron [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.899339] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.899983] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.899983] env[61545]: DEBUG nova.objects.instance [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1368.032407] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.032742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.033036] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.033310] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.033575] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.039905] env[61545]: INFO nova.compute.manager [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Terminating instance [ 1368.227196] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257058, 'name': ReconfigVM_Task, 'duration_secs': 0.20343} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.227576] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Reconfigured VM instance instance-00000076 to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1368.232387] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34537371-eda7-41a8-be77-84bd22442c2e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.248086] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1368.248086] env[61545]: value = "task-4257059" [ 1368.248086] env[61545]: _type = "Task" [ 1368.248086] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.256642] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.319765] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.351875] env[61545]: DEBUG nova.network.neutron [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updated VIF entry in instance network info cache for port 62ea8d65-9aae-4400-998b-1a364d16dfcb. 
{{(pid=61545) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1368.352368] env[61545]: DEBUG nova.network.neutron [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updating instance_info_cache with network_info: [{"id": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "address": "fa:16:3e:43:cb:9a", "network": {"id": "c97507b9-3c48-43a0-8e7d-057db107550d", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1175101548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16df87c509e74681a6a9f578bcaf44db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ea8d65-9a", "ovs_interfaceid": "62ea8d65-9aae-4400-998b-1a364d16dfcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.443550] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "983082dd-274c-4441-b386-caf775336ef0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.443873] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.444166] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "983082dd-274c-4441-b386-caf775336ef0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.444408] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.444626] env[61545]: DEBUG 
oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.446973] env[61545]: INFO nova.compute.manager [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Terminating instance [ 1368.544457] env[61545]: DEBUG nova.compute.manager [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1368.544715] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1368.545816] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf6af23-c952-4e04-90db-adaa3df9f8e6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.554440] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1368.554703] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10a7a2eb-5eb6-49c5-8eca-08e56d739a49 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.630556] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1368.630871] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1368.631012] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.631269] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1890837-a67d-4f2b-8475-a2d72232a611 {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.639857] env[61545]: DEBUG oslo_vmware.api [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1368.639857] env[61545]: value = "task-4257061" [ 1368.639857] env[61545]: _type = "Task" [ 1368.639857] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.651211] env[61545]: DEBUG oslo_vmware.api [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.759821] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257059, 'name': ReconfigVM_Task, 'duration_secs': 0.153524} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.760220] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-838860', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'name': 'volume-f670e2ee-2d66-439c-be73-79914f3d6fd5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '4e2eb270-abf0-4734-a49f-ac0b7ee141c8', 'attached_at': '2025-06-03T12:58:04.000000', 'detached_at': '', 'volume_id': 'f670e2ee-2d66-439c-be73-79914f3d6fd5', 'serial': 'f670e2ee-2d66-439c-be73-79914f3d6fd5'} {{(pid=61545) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1368.760561] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1368.761502] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8015be5c-3b23-498b-9e7b-a10448f98b32 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.770074] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1368.770376] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-684f7b19-cea2-4624-85e2-a0cabd9aeef5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.835487] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 
tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1368.835788] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1368.836033] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.836346] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88453295-c671-4fe1-9398-c2bc0e3fa0cb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.845287] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1368.845287] env[61545]: value = "task-4257063" [ 1368.845287] env[61545]: _type = "Task" [ 1368.845287] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.855772] env[61545]: DEBUG oslo_concurrency.lockutils [req-88b6e34d-2546-4c11-84c6-a45b494ba1a2 req-223d84ac-eb0c-4b97-b8a3-02e7dc5dce8d service nova] Releasing lock "refresh_cache-da648e8c-aea3-4731-ad2b-719a15f29abf" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.856532] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.908169] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4755f7c3-2b82-4eba-b58e-2a70eba62dbd tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.950722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "refresh_cache-983082dd-274c-4441-b386-caf775336ef0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.951535] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquired lock "refresh_cache-983082dd-274c-4441-b386-caf775336ef0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1368.951750] env[61545]: DEBUG nova.network.neutron [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.150696] env[61545]: DEBUG oslo_vmware.api [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197758} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.150964] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1369.151138] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1369.151375] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1369.151563] env[61545]: INFO nova.compute.manager [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Took 0.61 seconds to destroy the instance on the hypervisor. 
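The "Waiting for the task ... progress is N% ... completed successfully" sequences above are the client-side polling pattern used for long-running vCenter operations (DeleteDatastoreFile_Task, ReconfigVM_Task and so on): the driver submits the task, then repeatedly reads its state until it reaches success or error. The sketch below is a minimal, hypothetical version of such a poll loop; get_task_info and the fields it returns (state, progress, error) are stand-ins for illustration and are not the actual oslo.vmware API, which the driver invokes via wait_for_task() against the vCenter Task managed object.

import time

# Minimal sketch of the poll loop behind the "Waiting for the task ...
# progress is N% ... completed successfully" lines above. `get_task_info`
# is a hypothetical callable returning an object with `state` ('running',
# 'success' or 'error'), `progress` and `error`.
def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_id)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
        # Corresponds to the periodic "progress is N%" debug lines.
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)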
[ 1369.151833] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1369.151994] env[61545]: DEBUG nova.compute.manager [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1369.152162] env[61545]: DEBUG nova.network.neutron [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1369.355905] env[61545]: DEBUG oslo_vmware.api [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131943} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.356182] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1369.356418] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1369.356533] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1369.356749] env[61545]: INFO nova.compute.manager [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Took 2.26 seconds to destroy the instance on the hypervisor. [ 1369.357090] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1369.357357] env[61545]: DEBUG nova.compute.manager [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1369.357357] env[61545]: DEBUG nova.network.neutron [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1369.472826] env[61545]: DEBUG nova.network.neutron [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1369.548024] env[61545]: DEBUG nova.network.neutron [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.636280] env[61545]: DEBUG nova.compute.manager [req-61db8b62-26f5-45e9-81a8-f6820b086aa0 req-3d430197-8806-46b5-b99b-9b26e1b3473c service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Received event network-vif-deleted-2ff38b08-dfa0-4c6f-946d-b496a5074f97 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1369.636626] env[61545]: INFO nova.compute.manager [req-61db8b62-26f5-45e9-81a8-f6820b086aa0 req-3d430197-8806-46b5-b99b-9b26e1b3473c service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Neutron deleted interface 2ff38b08-dfa0-4c6f-946d-b496a5074f97; detaching it from the instance and deleting it from the info cache [ 1369.636922] env[61545]: DEBUG nova.network.neutron [req-61db8b62-26f5-45e9-81a8-f6820b086aa0 req-3d430197-8806-46b5-b99b-9b26e1b3473c service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.836852] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8013bc6e-7777-4bc4-92d1-f58874da9fe8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.856941] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 0 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1369.913826] env[61545]: DEBUG nova.network.neutron [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.051364] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Releasing lock 
"refresh_cache-983082dd-274c-4441-b386-caf775336ef0" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.052019] env[61545]: DEBUG nova.compute.manager [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1370.052685] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.053623] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1587c7a0-0df1-4394-841e-089a5fd9908d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.065083] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.065436] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52f81fcb-fae1-4290-a8e3-33ee2a62ac11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.077036] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1370.077036] env[61545]: value = "task-4257064" [ 1370.077036] env[61545]: _type = "Task" [ 1370.077036] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.084851] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.143042] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2a88340-5851-4485-9b39-b325bf67b643 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.153737] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfdc95f-0e69-44ed-b57b-2926f0cb130f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.185607] env[61545]: DEBUG nova.compute.manager [req-61db8b62-26f5-45e9-81a8-f6820b086aa0 req-3d430197-8806-46b5-b99b-9b26e1b3473c service nova] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Detach interface failed, port_id=2ff38b08-dfa0-4c6f-946d-b496a5074f97, reason: Instance ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1370.364885] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.364885] env[61545]: DEBUG nova.network.neutron [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.365791] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2435f1ab-540b-42fc-aeeb-65abf28d5529 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.374930] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1370.374930] env[61545]: value = "task-4257065" [ 1370.374930] env[61545]: _type = "Task" [ 1370.374930] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.384323] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.417062] env[61545]: INFO nova.compute.manager [-] [instance: ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8] Took 1.26 seconds to deallocate network for instance. [ 1370.585740] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257064, 'name': PowerOffVM_Task, 'duration_secs': 0.131654} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.586026] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1370.586210] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1370.586485] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f9e0b78-6baa-4291-a509-103fd73c5ff2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.611721] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1370.611948] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1370.612189] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Deleting the datastore file [datastore2] 983082dd-274c-4441-b386-caf775336ef0 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1370.612473] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b29d340c-2d71-4337-895a-9ae127242c2c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.618507] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for the task: (returnval){ [ 1370.618507] env[61545]: value = "task-4257067" [ 1370.618507] env[61545]: _type = "Task" [ 1370.618507] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.626897] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257067, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.868950] env[61545]: INFO nova.compute.manager [-] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Took 1.51 seconds to deallocate network for instance. 
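The oslo_concurrency.lockutils lines above ('Acquiring lock ... by ...', 'acquired ... waited 0.000s', '"released" ... held 1.008s') come from named, process-local locks that serialize work on shared state such as "compute_resources" and the per-instance "refresh_cache-<uuid>" network info cache. The snippet below is a simplified stand-in for that pattern, showing where the waited/held timings in the log come from; it is not the oslo.concurrency implementation.

import contextlib
import threading
import time

_locks = {}
_locks_guard = threading.Lock()

@contextlib.contextmanager
def named_lock(name, owner):
    # Simplified stand-in for named locks: one lock object per name, with
    # the same "acquiring / acquired (waited) / released (held)" reporting
    # seen in the log above.
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - acquired:.3f}s')

# Example: serialize resource-usage updates the way the resource tracker does.
with named_lock('compute_resources', 'ResourceTracker.update_usage'):
    pass  # update usage for the instance here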
[ 1370.884918] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257065, 'name': PowerOffVM_Task, 'duration_secs': 0.4399} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.885212] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1370.885403] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 17 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1370.925880] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.926203] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.926438] env[61545]: DEBUG nova.objects.instance [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.128988] env[61545]: DEBUG oslo_vmware.api [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Task: {'id': task-4257067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096877} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.129202] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.129422] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.129607] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.129779] env[61545]: INFO nova.compute.manager [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] [instance: 983082dd-274c-4441-b386-caf775336ef0] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1371.130080] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1371.130293] env[61545]: DEBUG nova.compute.manager [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1371.130391] env[61545]: DEBUG nova.network.neutron [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1371.145880] env[61545]: DEBUG nova.network.neutron [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1371.394351] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1371.394870] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1371.394870] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1371.395054] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1371.395126] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1371.395284] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1371.395482] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1371.395640] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1371.395802] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1371.395964] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1371.396153] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1371.403153] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ace110ce-0f60-470d-a02b-9fe863d242d5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.414122] env[61545]: INFO nova.compute.manager [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Took 0.54 seconds to detach 1 volumes for instance. [ 1371.416615] env[61545]: DEBUG nova.compute.manager [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Deleting volume: f670e2ee-2d66-439c-be73-79914f3d6fd5 {{(pid=61545) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1371.422029] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1371.422029] env[61545]: value = "task-4257068" [ 1371.422029] env[61545]: _type = "Task" [ 1371.422029] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.432924] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.579749] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb3c228-982e-4203-9401-904ff96b9a13 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.589497] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23648f19-eff0-4754-853f-556408efe63e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.621946] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd73facb-7cd0-4069-8956-03ad05ba0333 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.630696] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710cff62-f6c4-41c2-9cec-cdf84eb05a68 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.646616] env[61545]: DEBUG nova.compute.provider_tree [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.648115] env[61545]: DEBUG nova.network.neutron [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.667217] env[61545]: DEBUG nova.compute.manager [req-60088f95-b4d6-4c02-8845-3df7e84e00c6 req-a7eec4a9-091b-4a97-9154-df9f069b86cb service nova] [instance: 4e2eb270-abf0-4734-a49f-ac0b7ee141c8] Received event network-vif-deleted-9586e46e-23b8-40a2-9703-712bf31c9e96 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1371.933232] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257068, 'name': ReconfigVM_Task, 'duration_secs': 0.224249} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.933601] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 33 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1371.967690] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.151730] env[61545]: DEBUG nova.scheduler.client.report [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1372.156550] env[61545]: INFO nova.compute.manager [-] [instance: 983082dd-274c-4441-b386-caf775336ef0] Took 1.03 seconds to deallocate network for instance. 
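The nova.virt.hardware lines above walk through CPU topology selection for the flavor being reconfigured: flavor and image limits are unset (0:0:0), the maximums fall back to 65536 sockets/cores/threads, and for a 1-vCPU flavor exactly one topology (1 socket, 1 core, 1 thread) survives, hence "Got 1 possible topologies". The following is a simplified, self-contained illustration of that enumeration, not Nova's actual _get_possible_cpu_topologies code.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', ['sockets', 'cores', 'threads'])

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # flavor's vCPU count, with each dimension clamped to the configured
    # maximum (and never larger than the vCPU count itself).
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the 1-vCPU flavors above this yields a single option, matching the
# "Got 1 possible topologies" line:
print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]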
[ 1372.440264] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1372.440693] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.440693] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1372.440808] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.440950] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1372.441114] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1372.441323] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1372.441478] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1372.441642] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1372.441802] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1372.441969] env[61545]: DEBUG nova.virt.hardware [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1372.447305] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1372.447617] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-829312e3-950e-4bb6-8301-56777bc83101 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.468105] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1372.468105] env[61545]: value = "task-4257070" [ 1372.468105] env[61545]: _type = "Task" [ 1372.468105] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.477097] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257070, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.658292] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.660726] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.693s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.660937] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.663635] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.663830] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.663985] env[61545]: DEBUG nova.objects.instance [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lazy-loading 'resources' on Instance uuid 983082dd-274c-4441-b386-caf775336ef0 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1372.685583] env[61545]: INFO nova.scheduler.client.report [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8 [ 1372.687685] env[61545]: INFO nova.scheduler.client.report [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocations for instance 4e2eb270-abf0-4734-a49f-ac0b7ee141c8 [ 1372.978557] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257070, 'name': ReconfigVM_Task, 'duration_secs': 0.161176} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.978874] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=61545) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1372.979690] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d737ed4-aa02-4ae1-83b6-f48510c0ed6b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.002511] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1373.003288] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-590b9f49-0aab-48ab-a2c8-3a571c8fb19d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.025600] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1373.025600] env[61545]: value = "task-4257071" [ 1373.025600] env[61545]: _type = "Task" [ 1373.025600] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.035632] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257071, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.198364] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8fadf053-de27-450c-ad01-10e8a84d3482 tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "4e2eb270-abf0-4734-a49f-ac0b7ee141c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.613s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.198806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-34192185-6deb-419d-ab3d-319f6f77b6dd tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "ed41f66c-f83b-4d76-a0bc-0e4a36f1bae8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.166s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.277160] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82c4587-fb5d-4437-a60f-53c00e9f6ab9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.286618] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e6c2f5-c0de-4bfd-85ce-3fd0700b221b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.322605] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f15a5a-3e67-4554-8c82-05056432c2e3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.331542] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fefdf0-34ec-4243-b649-ff4b057762ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.347046] env[61545]: DEBUG nova.compute.provider_tree [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.536034] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257071, 'name': ReconfigVM_Task, 'duration_secs': 0.280863} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.536586] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Reconfigured VM instance instance-0000007b to attach disk [datastore2] 55917182-ba42-4f29-aecd-134c899e3028/55917182-ba42-4f29-aecd-134c899e3028.vmdk or device None with type thin {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1373.536848] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 50 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1373.851153] env[61545]: DEBUG nova.scheduler.client.report [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 245, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.859328] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1373.859702] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.859810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1373.860024] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.860201] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.862252] env[61545]: INFO nova.compute.manager [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Terminating instance [ 1374.045636] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210e24a8-d1cc-4682-9ff9-7635f0a3b958 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.066086] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f111e07-7c82-439e-8940-df53b6943fc4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.083654] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 67 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1374.139322] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.139586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.139801] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.140016] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.140185] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.142576] env[61545]: INFO nova.compute.manager [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Terminating instance [ 1374.187537] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.354929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.366719] env[61545]: DEBUG nova.compute.manager [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1374.366953] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1374.367861] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a63a5ae-6374-48c0-892e-4eb32c9f0110 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.377350] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.377701] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-037f840f-165b-4e2d-a064-2a9565864822 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.380159] env[61545]: INFO nova.scheduler.client.report [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Deleted allocations for instance 983082dd-274c-4441-b386-caf775336ef0 [ 1374.391174] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1374.391174] env[61545]: value = "task-4257072" [ 1374.391174] env[61545]: _type = "Task" [ 1374.391174] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.401397] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.629343] env[61545]: DEBUG nova.network.neutron [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Port f859288c-a2db-438a-8bdb-5c2669003b88 binding to destination host cpu-1 is already ACTIVE {{(pid=61545) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1374.645862] env[61545]: DEBUG nova.compute.manager [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1374.646110] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1374.647010] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e0c9e6-9e0c-45b2-af30-426e34416d60 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.655150] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.655468] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5d71c8f-c4e9-43c9-a930-0d794ca1b5cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.662289] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1374.662289] env[61545]: value = "task-4257073" [ 1374.662289] env[61545]: _type = "Task" [ 1374.662289] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.671716] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.890036] env[61545]: DEBUG oslo_concurrency.lockutils [None req-294fcf5e-6e96-4168-b1bf-dd1e4035892e tempest-ServersListShow298Test-197129038 tempest-ServersListShow298Test-197129038-project-member] Lock "983082dd-274c-4441-b386-caf775336ef0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.446s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.901936] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257072, 'name': PowerOffVM_Task, 'duration_secs': 0.213855} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.902165] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1374.902253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1374.902520] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9274d59-7f55-4476-afdc-914333b97455 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.138411] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1375.138645] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1375.138833] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] 99c9b4ab-efcf-4e13-bd92-c634972fe082 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.139105] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b34ded88-5023-4023-8dc7-97a2f209b1c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.145745] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1375.145745] env[61545]: value = "task-4257075" [ 1375.145745] env[61545]: _type = "Task" [ 1375.145745] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.154772] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257075, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.172702] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257073, 'name': PowerOffVM_Task, 'duration_secs': 0.228459} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.172979] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.173209] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1375.173496] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81cfda82-a2d5-4464-92e2-b26e79d91461 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.187072] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.239684] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1375.240158] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1375.240441] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleting the datastore file [datastore2] 9a59f45b-727f-45ea-ad33-64fa23aaffe7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.240835] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-251b8d34-7597-442d-918d-36314f77ea85 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.248466] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for the task: (returnval){ [ 1375.248466] env[61545]: value = "task-4257077" [ 1375.248466] env[61545]: _type = "Task" [ 1375.248466] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.258879] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.654920] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1375.655265] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1375.655367] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.662843] env[61545]: DEBUG oslo_vmware.api [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14298} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.663108] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.663322] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1375.663508] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1375.663682] env[61545]: INFO nova.compute.manager [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1375.663921] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1375.664408] env[61545]: DEBUG nova.compute.manager [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1375.664512] env[61545]: DEBUG nova.network.neutron [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1375.759841] env[61545]: DEBUG oslo_vmware.api [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Task: {'id': task-4257077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146581} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.760611] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.760875] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1375.761098] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1375.761302] env[61545]: INFO nova.compute.manager [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1375.761577] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1375.761797] env[61545]: DEBUG nova.compute.manager [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1375.761907] env[61545]: DEBUG nova.network.neutron [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1376.072922] env[61545]: DEBUG nova.compute.manager [req-508e39d5-c8fc-47dd-9f76-54e22bacc258 req-9d6545ce-dd0d-4698-b7bd-159a31b16558 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Received event network-vif-deleted-30b017cf-ed19-4ecc-a917-33a9eff869e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1376.073135] env[61545]: INFO nova.compute.manager [req-508e39d5-c8fc-47dd-9f76-54e22bacc258 req-9d6545ce-dd0d-4698-b7bd-159a31b16558 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Neutron deleted interface 30b017cf-ed19-4ecc-a917-33a9eff869e2; detaching it from the instance and deleting it from the info cache [ 1376.073307] env[61545]: DEBUG nova.network.neutron [req-508e39d5-c8fc-47dd-9f76-54e22bacc258 req-9d6545ce-dd0d-4698-b7bd-159a31b16558 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.186905] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.549243] env[61545]: DEBUG nova.network.neutron [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.577249] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9107bfb-5afc-4acb-9273-58cd21004304 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.590439] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b88e5b-030c-449d-8910-87e6ecb4a0aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.616631] env[61545]: DEBUG nova.compute.manager [req-508e39d5-c8fc-47dd-9f76-54e22bacc258 req-9d6545ce-dd0d-4698-b7bd-159a31b16558 service nova] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Detach interface failed, port_id=30b017cf-ed19-4ecc-a917-33a9eff869e2, reason: Instance 99c9b4ab-efcf-4e13-bd92-c634972fe082 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1376.667447] env[61545]: DEBUG nova.network.neutron [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.688728] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.688956] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1376.689154] env[61545]: DEBUG nova.network.neutron [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.690945] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.691181] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.691346] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.691495] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1376.692594] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57aca83-9162-4850-8140-8882f8520ca8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.703247] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b85b7c-da7d-4f72-aca1-6f74a83404cc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.717716] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65052f87-b91e-4914-8534-7dbc5f939aba {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.724690] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e236aae8-f62c-4df6-879c-877577d3d1d0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.754107] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179646MB free_disk=246GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1376.754343] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.754529] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.052065] env[61545]: INFO nova.compute.manager [-] [instance: 99c9b4ab-efcf-4e13-bd92-c634972fe082] Took 1.39 seconds to deallocate network for instance. [ 1377.170453] env[61545]: INFO nova.compute.manager [-] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Took 1.41 seconds to deallocate network for instance. 
[ 1377.402880] env[61545]: DEBUG nova.network.neutron [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.558684] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.676755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.761847] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Applying migration context for instance 55917182-ba42-4f29-aecd-134c899e3028 as it has an incoming, in-progress migration 4eb61987-1e41-4be7-b8c5-49f76c121b98. Migration status is post-migrating {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1377.763191] env[61545]: INFO nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating resource usage from migration 4eb61987-1e41-4be7-b8c5-49f76c121b98 [ 1377.782694] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance d0f42893-3332-4027-93df-bb46e3350485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1377.782842] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 9a59f45b-727f-45ea-ad33-64fa23aaffe7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1377.782967] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 99c9b4ab-efcf-4e13-bd92-c634972fe082 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1377.783119] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance da648e8c-aea3-4731-ad2b-719a15f29abf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1377.783237] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Migration 4eb61987-1e41-4be7-b8c5-49f76c121b98 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1377.783354] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance 55917182-ba42-4f29-aecd-134c899e3028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1377.783535] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1377.783665] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=250GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1377.867761] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c023336a-9fc8-488c-a063-3a1ca43fd4d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.876340] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac18a4c7-6451-470d-9dee-19da92b07857 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.909278] env[61545]: DEBUG oslo_concurrency.lockutils [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1377.913681] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334f21f5-0c4a-473a-a676-c925e6034180 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.922441] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c332192c-b7e8-4794-90a3-c2a016b757ac {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.937979] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1378.107889] env[61545]: DEBUG nova.compute.manager [req-b2d4989c-75ec-4e31-97f3-adcc95fbc5e9 req-878aa2cb-9c2f-41fb-b49d-d2dc2a52cc26 service nova] [instance: 9a59f45b-727f-45ea-ad33-64fa23aaffe7] Received event network-vif-deleted-dbcc9cc9-fdcd-497b-ad2d-743248442109 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1378.435487] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b917d6d4-bb93-41cb-8a6d-db586f4270b0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.457400] 
env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab95c64f-8b33-4544-82d6-6cc95ef94323 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.465035] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 83 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1378.469416] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updated inventory for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with generation 190 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1378.469640] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating resource provider 7015027d-c4e1-4938-ac31-6e4672774d7e generation from 190 to 191 during operation: update_inventory {{(pid=61545) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1378.469793] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Updating inventory in ProviderTree for provider 7015027d-c4e1-4938-ac31-6e4672774d7e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1378.979053] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.979621] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1378.979800] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.225s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.980043] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad59317f-15da-4e5e-bd3f-47a5985da095 
{{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.981910] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.423s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.982253] env[61545]: DEBUG nova.objects.instance [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'resources' on Instance uuid 99c9b4ab-efcf-4e13-bd92-c634972fe082 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1378.989138] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1378.989138] env[61545]: value = "task-4257078" [ 1378.989138] env[61545]: _type = "Task" [ 1378.989138] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.998503] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.499738] env[61545]: DEBUG oslo_vmware.api [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257078, 'name': PowerOnVM_Task, 'duration_secs': 0.369326} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.499960] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.500253] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-8c9d94fe-cdcc-4a55-87af-f71d70d01777 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance '55917182-ba42-4f29-aecd-134c899e3028' progress to 100 {{(pid=61545) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1379.575941] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56493aa6-af0a-486e-b913-4d085a82bc9b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.584123] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28df0c89-104f-4947-a02d-9f1f18a3c721 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.614088] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f094db5-0fd2-49d1-9721-88e6794d700b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.621679] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab7e82d-003d-42be-bcef-b413d3aa6d90 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.637960] env[61545]: DEBUG nova.compute.provider_tree [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.983301] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.983757] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.983757] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.983886] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1380.141739] env[61545]: DEBUG nova.scheduler.client.report [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.647758] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.650651] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.974s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.650912] env[61545]: DEBUG nova.objects.instance [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lazy-loading 'resources' on Instance uuid 9a59f45b-727f-45ea-ad33-64fa23aaffe7 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.673163] env[61545]: INFO nova.scheduler.client.report [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocations for instance 99c9b4ab-efcf-4e13-bd92-c634972fe082 [ 1381.181374] env[61545]: DEBUG oslo_concurrency.lockutils [None req-d9e10bba-b6e6-4ee9-9380-f27c74f0190c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "99c9b4ab-efcf-4e13-bd92-c634972fe082" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.322s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.235271] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b250f0-9546-4464-906d-8bcd964e80de {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.243287] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb670f3-b430-41f2-97a4-979d3f7c4458 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.273471] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a32734-17b3-404d-88fb-aa4fe3071cfd {{(pid=61545) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.281389] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804a40d4-0f55-49b4-aaf5-6c7351a5f95b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.296848] env[61545]: DEBUG nova.compute.provider_tree [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.464140] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.464464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.464627] env[61545]: DEBUG nova.compute.manager [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Going to confirm migration 10 {{(pid=61545) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1381.799870] env[61545]: DEBUG nova.scheduler.client.report [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1382.001675] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.001869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1382.002061] env[61545]: DEBUG nova.network.neutron 
[None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.002308] env[61545]: DEBUG nova.objects.instance [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'info_cache' on Instance uuid 55917182-ba42-4f29-aecd-134c899e3028 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1382.186826] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.304755] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.654s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.330081] env[61545]: INFO nova.scheduler.client.report [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Deleted allocations for instance 9a59f45b-727f-45ea-ad33-64fa23aaffe7 [ 1382.838134] env[61545]: DEBUG oslo_concurrency.lockutils [None req-be9fb1b7-583b-4c34-b552-87a35362891e tempest-ServersTestJSON-512759938 tempest-ServersTestJSON-512759938-project-member] Lock "9a59f45b-727f-45ea-ad33-64fa23aaffe7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.698s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.189599] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.236453] env[61545]: DEBUG nova.network.neutron [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [{"id": "f859288c-a2db-438a-8bdb-5c2669003b88", "address": "fa:16:3e:aa:ff:a6", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": 
"nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf859288c-a2", "ovs_interfaceid": "f859288c-a2db-438a-8bdb-5c2669003b88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.740571] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-55917182-ba42-4f29-aecd-134c899e3028" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1383.740571] env[61545]: DEBUG nova.objects.instance [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'migration_context' on Instance uuid 55917182-ba42-4f29-aecd-134c899e3028 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1384.244077] env[61545]: DEBUG nova.objects.base [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Object Instance<55917182-ba42-4f29-aecd-134c899e3028> lazy-loaded attributes: info_cache,migration_context {{(pid=61545) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1384.244827] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44d7ee7-aa0c-4ee4-a6e9-1469f478e866 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.266420] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1bdbb2-805e-475b-926e-b7571a6650d7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.272875] env[61545]: DEBUG oslo_vmware.api [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1384.272875] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52836300-d5bc-2e7b-d949-7b347d9c1c1c" [ 1384.272875] env[61545]: _type = "Task" [ 1384.272875] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.281310] env[61545]: DEBUG oslo_vmware.api [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52836300-d5bc-2e7b-d949-7b347d9c1c1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.690363] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "d0f42893-3332-4027-93df-bb46e3350485" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.690586] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.690864] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "d0f42893-3332-4027-93df-bb46e3350485-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.691084] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.691278] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.693659] env[61545]: INFO nova.compute.manager [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Terminating instance [ 1384.783171] env[61545]: DEBUG oslo_vmware.api [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52836300-d5bc-2e7b-d949-7b347d9c1c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.009444} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.783520] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.783770] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.197558] env[61545]: DEBUG nova.compute.manager [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1385.197869] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.199020] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0516eaec-1ba9-41c0-8b6d-b407375ba0ef {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.207700] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.207959] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f7b7593-c764-44d6-a99c-3530767a9782 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.215017] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1385.215017] env[61545]: value = "task-4257080" [ 1385.215017] env[61545]: _type = "Task" [ 1385.215017] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.224326] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257080, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.365251] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b6dfec-6613-469e-af9e-c40a8820b63a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.374029] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c39d995-9a39-40e1-974b-ef9016d6fa69 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.407959] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de76b08-ff66-4d5d-8945-7762c28d7dc3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.416507] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c42ad3-b94f-420d-b8c9-0259f4af642b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.432724] env[61545]: DEBUG nova.compute.provider_tree [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.725562] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257080, 'name': PowerOffVM_Task, 'duration_secs': 0.256289} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.725847] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.725933] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.726267] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d08ce65-bc76-4c9c-aca4-acc9449ec6ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.791133] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1385.791380] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1385.791573] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleting the datastore file [datastore2] d0f42893-3332-4027-93df-bb46e3350485 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1385.791867] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26a04dc3-7c3c-4f71-9431-64c54bd4e0b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.799242] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for the task: (returnval){ [ 1385.799242] env[61545]: value = "task-4257082" [ 1385.799242] env[61545]: _type = "Task" [ 1385.799242] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.807811] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.936409] env[61545]: DEBUG nova.scheduler.client.report [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.311957] env[61545]: DEBUG oslo_vmware.api [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Task: {'id': task-4257082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12959} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.312300] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.312558] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.312797] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1386.313083] env[61545]: INFO nova.compute.manager [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] [instance: d0f42893-3332-4027-93df-bb46e3350485] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1386.313463] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1386.313738] env[61545]: DEBUG nova.compute.manager [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1386.313863] env[61545]: DEBUG nova.network.neutron [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1386.622077] env[61545]: DEBUG nova.compute.manager [req-55678e9a-f21f-4f47-bdea-5e9ba63579c4 req-83846599-ab96-4779-9b3b-c50c568061c0 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Received event network-vif-deleted-aacdaf9b-9518-4298-a1df-ce1c3931e8e2 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1386.622393] env[61545]: INFO nova.compute.manager [req-55678e9a-f21f-4f47-bdea-5e9ba63579c4 req-83846599-ab96-4779-9b3b-c50c568061c0 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Neutron deleted interface aacdaf9b-9518-4298-a1df-ce1c3931e8e2; detaching it from the instance and deleting it from the info cache [ 1386.622640] env[61545]: DEBUG nova.network.neutron [req-55678e9a-f21f-4f47-bdea-5e9ba63579c4 req-83846599-ab96-4779-9b3b-c50c568061c0 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.947338] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.163s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.106324] env[61545]: DEBUG nova.network.neutron [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.125547] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fe2aaa6-274e-4e7a-ac91-c0668dcd5f01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.136227] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f4dce4-6c0b-43df-8d1d-54c077f49d4c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.163102] env[61545]: DEBUG nova.compute.manager [req-55678e9a-f21f-4f47-bdea-5e9ba63579c4 req-83846599-ab96-4779-9b3b-c50c568061c0 service nova] [instance: d0f42893-3332-4027-93df-bb46e3350485] Detach interface failed, port_id=aacdaf9b-9518-4298-a1df-ce1c3931e8e2, reason: Instance d0f42893-3332-4027-93df-bb46e3350485 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1387.515096] env[61545]: INFO nova.scheduler.client.report [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocation for migration 4eb61987-1e41-4be7-b8c5-49f76c121b98 [ 1387.610369] env[61545]: INFO nova.compute.manager [-] [instance: d0f42893-3332-4027-93df-bb46e3350485] Took 1.30 seconds to deallocate network for instance. [ 1388.021647] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ac8574b-818e-42d1-aa24-2ba89eae2606 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.557s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.118799] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.119117] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.119352] env[61545]: DEBUG nova.objects.instance [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lazy-loading 'resources' on Instance uuid d0f42893-3332-4027-93df-bb46e3350485 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1388.553458] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.553926] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.554228] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "55917182-ba42-4f29-aecd-134c899e3028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.554468] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.554677] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.556934] env[61545]: INFO nova.compute.manager [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Terminating instance [ 1388.688570] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6f72e6-f719-4adf-9a62-246d4c098103 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.696662] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4ee357-5833-46a1-8213-0b9d654d734c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.728504] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e813f586-e08b-48a6-a414-911dae55684b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.736830] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c1a2f0-4167-4fb1-b8ab-8f21a11bb459 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.751724] env[61545]: DEBUG nova.compute.provider_tree [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.061116] env[61545]: DEBUG nova.compute.manager [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1389.061399] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1389.062373] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf525a7-b15a-44a0-8aba-c6ccf505325f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.069509] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.069738] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.074480] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1389.074747] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f3ccf5b-1033-4746-ab8a-76d97ede15df {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.081892] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1389.081892] env[61545]: value = "task-4257083" [ 1389.081892] env[61545]: _type = "Task" [ 1389.081892] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.090885] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257083, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.255360] env[61545]: DEBUG nova.scheduler.client.report [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1389.572586] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1389.591957] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257083, 'name': PowerOffVM_Task, 'duration_secs': 0.197342} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.592245] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1389.592439] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1389.592697] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd34bba8-6419-4895-9c4a-6b45d49cbe00 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.652044] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1389.652336] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1389.652540] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] 55917182-ba42-4f29-aecd-134c899e3028 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1389.652815] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-483c8789-40c3-4262-9f75-2775b26aab3a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.661478] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1389.661478] env[61545]: value = "task-4257085" [ 1389.661478] env[61545]: _type = "Task" [ 1389.661478] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.672505] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.760787] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.783508] env[61545]: INFO nova.scheduler.client.report [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Deleted allocations for instance d0f42893-3332-4027-93df-bb46e3350485 [ 1390.093221] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.093523] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.094974] env[61545]: INFO nova.compute.claims [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1390.171684] env[61545]: DEBUG oslo_vmware.api [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136006} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.172017] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1390.172231] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1390.172443] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1390.172624] env[61545]: INFO nova.compute.manager [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1390.172863] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1390.173062] env[61545]: DEBUG nova.compute.manager [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1390.173160] env[61545]: DEBUG nova.network.neutron [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1390.291774] env[61545]: DEBUG oslo_concurrency.lockutils [None req-73b18eec-2f39-4ac9-86bc-bbb6d54b0e6c tempest-ServerActionsTestOtherA-1294261184 tempest-ServerActionsTestOtherA-1294261184-project-member] Lock "d0f42893-3332-4027-93df-bb46e3350485" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.601s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.455949] env[61545]: DEBUG nova.compute.manager [req-679bd413-65db-4f64-84e6-b2dda499b52d req-049c6d28-019a-4d15-afdd-fa4dd10dc106 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Received event network-vif-deleted-f859288c-a2db-438a-8bdb-5c2669003b88 {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1390.456243] env[61545]: INFO nova.compute.manager [req-679bd413-65db-4f64-84e6-b2dda499b52d req-049c6d28-019a-4d15-afdd-fa4dd10dc106 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Neutron deleted interface f859288c-a2db-438a-8bdb-5c2669003b88; detaching it from the instance and deleting it from the info cache [ 1390.456475] env[61545]: DEBUG nova.network.neutron [req-679bd413-65db-4f64-84e6-b2dda499b52d req-049c6d28-019a-4d15-afdd-fa4dd10dc106 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.939329] env[61545]: DEBUG nova.network.neutron [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.958862] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bea7392c-a593-44bc-9330-d475164b3fa3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.969153] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9456d13e-6ab1-4640-8bc1-6e52829a19c9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.994646] env[61545]: DEBUG nova.compute.manager [req-679bd413-65db-4f64-84e6-b2dda499b52d req-049c6d28-019a-4d15-afdd-fa4dd10dc106 service nova] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Detach interface failed, port_id=f859288c-a2db-438a-8bdb-5c2669003b88, reason: Instance 55917182-ba42-4f29-aecd-134c899e3028 could not be found. 
{{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1391.160404] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e28301-8b40-4fb1-8aab-04385c939402 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.168598] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e94a40-7055-4c57-996c-eb540895fd7f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.199722] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd9d075-37ca-468f-b8af-4546ce71a220 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.207538] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514f3359-a21f-4cc4-b560-73d105d96b08 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.221073] env[61545]: DEBUG nova.compute.provider_tree [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.441541] env[61545]: INFO nova.compute.manager [-] [instance: 55917182-ba42-4f29-aecd-134c899e3028] Took 1.27 seconds to deallocate network for instance. [ 1391.724362] env[61545]: DEBUG nova.scheduler.client.report [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.948170] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.228955] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.135s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.229506] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] 
[instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Start building networks asynchronously for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1392.232402] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.285s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.232613] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.261747] env[61545]: INFO nova.scheduler.client.report [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocations for instance 55917182-ba42-4f29-aecd-134c899e3028 [ 1392.737207] env[61545]: DEBUG nova.compute.utils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1392.738761] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Allocating IP information in the background. 
{{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1392.738991] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] allocate_for_instance() {{(pid=61545) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1392.771092] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e53c90ea-a8b9-4601-9734-c2b0fedc8f39 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "55917182-ba42-4f29-aecd-134c899e3028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.217s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.787476] env[61545]: DEBUG nova.policy [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4333754ae4a4e26bab98dfe1853e667', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b64f16b672ff471ba1d48aa2490b9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61545) authorize /opt/stack/nova/nova/policy.py:192}} [ 1393.082403] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Successfully created port: 73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1393.242598] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1394.021163] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.021421] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.251508] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Start spawning the instance on the hypervisor. {{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1394.278009] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1394.278302] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.278506] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1394.278720] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.278870] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 
tempest-ServerDiskConfigTestJSON-1666434622-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1394.279038] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1394.279343] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1394.279596] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1394.279822] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1394.279996] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1394.280223] env[61545]: DEBUG nova.virt.hardware [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1394.281461] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d558eb2-3418-4c38-8542-4ef858185818 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.290984] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60717c7f-62c4-416b-a1c5-fe5cf9832f0c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.479044] env[61545]: DEBUG nova.compute.manager [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Received event network-vif-plugged-73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1394.479044] env[61545]: DEBUG oslo_concurrency.lockutils [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] Acquiring lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61545) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.479044] env[61545]: DEBUG oslo_concurrency.lockutils [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.479044] env[61545]: DEBUG oslo_concurrency.lockutils [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.479044] env[61545]: DEBUG nova.compute.manager [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] No waiting events found dispatching network-vif-plugged-73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1394.479044] env[61545]: WARNING nova.compute.manager [req-c4d189d6-f1fc-4f3f-b77e-d14e449194a6 req-707b67b4-4f26-4ef0-ad75-76bf445e00de service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Received unexpected event network-vif-plugged-73eaa178-06d6-45cf-bf15-28ff642a115e for instance with vm_state building and task_state spawning. [ 1394.523680] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Starting instance... 
{{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1395.048171] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.048810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.050518] env[61545]: INFO nova.compute.claims [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1395.053869] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Successfully updated port: 73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.070209] env[61545]: DEBUG nova.compute.manager [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Received event network-changed-73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1395.070591] env[61545]: DEBUG nova.compute.manager [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Refreshing instance network info cache due to event network-changed-73eaa178-06d6-45cf-bf15-28ff642a115e. 
{{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1395.070863] env[61545]: DEBUG oslo_concurrency.lockutils [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] Acquiring lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.071081] env[61545]: DEBUG oslo_concurrency.lockutils [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] Acquired lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.071292] env[61545]: DEBUG nova.network.neutron [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Refreshing network info cache for port 73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.558376] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.604039] env[61545]: DEBUG nova.network.neutron [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.691520] env[61545]: DEBUG nova.network.neutron [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.119310] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f0b941-d762-49d5-8a46-ec3d4c9f3659 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.127453] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5548e62-1e61-4578-8550-719b8f3b2ea0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.159552] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67980508-67b1-4c90-8544-7b0d419b99d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.167557] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de42dcb8-e7e5-429e-81de-98f4fb8f8222 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.181010] env[61545]: DEBUG nova.compute.provider_tree [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.194087] env[61545]: DEBUG oslo_concurrency.lockutils [req-72dcf34b-8bbb-4ae7-9ffd-42694c3142df req-3e9c75ad-8611-41e7-9124-0c1bd4a77f8d service nova] Releasing lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.194460] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.194637] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.684124] env[61545]: DEBUG nova.scheduler.client.report [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1396.725182] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.848277] env[61545]: DEBUG nova.network.neutron [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Updating instance_info_cache with network_info: [{"id": "73eaa178-06d6-45cf-bf15-28ff642a115e", "address": "fa:16:3e:13:80:a7", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73eaa178-06", "ovs_interfaceid": "73eaa178-06d6-45cf-bf15-28ff642a115e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.189330] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.189907] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1397.351233] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "refresh_cache-7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.351563] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Instance network_info: |[{"id": "73eaa178-06d6-45cf-bf15-28ff642a115e", "address": "fa:16:3e:13:80:a7", "network": {"id": "82c552e6-8741-4134-ab57-240c59f6069b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1783135341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b64f16b672ff471ba1d48aa2490b9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73eaa178-06", "ovs_interfaceid": "73eaa178-06d6-45cf-bf15-28ff642a115e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1397.352021] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:80:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73eaa178-06d6-45cf-bf15-28ff642a115e', 'vif_model': 'vmxnet3'}] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.359759] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1397.360388] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.360670] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03445e1d-14a7-461a-85a7-73506f8712fd {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.380810] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.380810] env[61545]: value = "task-4257086" [ 1397.380810] env[61545]: _type = "Task" [ 1397.380810] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.388966] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257086, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.694758] env[61545]: DEBUG nova.compute.utils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1397.696291] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1397.891587] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257086, 'name': CreateVM_Task, 'duration_secs': 0.32632} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.891766] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.892469] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.892640] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.892974] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1397.893255] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d7bf681-6296-46d9-be4f-aa0f8dc57794 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.897964] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1397.897964] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52478293-012a-6527-2f69-b4a813ef3401" [ 1397.897964] env[61545]: _type = "Task" [ 1397.897964] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.906015] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52478293-012a-6527-2f69-b4a813ef3401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.197831] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Start building block device mappings for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1398.410012] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52478293-012a-6527-2f69-b4a813ef3401, 'name': SearchDatastore_Task, 'duration_secs': 0.009702} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.410336] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.410570] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.410826] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.410975] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.411186] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.411448] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e95db7-02c4-4629-a892-b09f0c5f389b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.420043] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.420233] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.420923] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-740e154a-a9a8-4bfb-87a4-fcb838342e48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.426148] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1398.426148] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52835742-6a9e-6a5d-24ca-17715f9f9899" [ 1398.426148] env[61545]: _type = "Task" [ 1398.426148] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.433493] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52835742-6a9e-6a5d-24ca-17715f9f9899, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.937102] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52835742-6a9e-6a5d-24ca-17715f9f9899, 'name': SearchDatastore_Task, 'duration_secs': 0.009168} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.937883] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-912d54ca-87e9-4c4e-b84d-ae09d5e730ff {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.943013] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1398.943013] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e1f171-4c51-f8d7-5620-64a188f24381" [ 1398.943013] env[61545]: _type = "Task" [ 1398.943013] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.950480] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e1f171-4c51-f8d7-5620-64a188f24381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.207551] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1399.237691] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1399.237953] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1399.238125] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1399.238314] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1399.238460] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1399.238606] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1399.238815] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1399.238973] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1399.239150] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 
tempest-ServerShowV254Test-29696842-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1399.239310] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1399.239481] env[61545]: DEBUG nova.virt.hardware [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1399.240353] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcac9cc-9f3f-4c1d-a05c-bcc21cf4cea4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.248431] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2464c74-e45c-4028-b7ef-d48678a45a7a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.262294] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.267757] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Creating folder: Project (5f7c9f8aee0a4ec2a199b7e07cdd0fcd). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1399.268014] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f9466bf-aa4b-40fb-8b64-8827a333f061 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.279372] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Created folder: Project (5f7c9f8aee0a4ec2a199b7e07cdd0fcd) in parent group-v838542. [ 1399.279553] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Creating folder: Instances. Parent ref: group-v838882. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1399.279770] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-434735d4-2d4d-48f5-b254-672089875439 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.288926] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Created folder: Instances in parent group-v838882. 
[ 1399.289196] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1399.289397] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1399.289594] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-311fc0a4-0680-46c1-aea7-854bc3de4772 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.307522] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.307522] env[61545]: value = "task-4257089" [ 1399.307522] env[61545]: _type = "Task" [ 1399.307522] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.314924] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257089, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.454637] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52e1f171-4c51-f8d7-5620-64a188f24381, 'name': SearchDatastore_Task, 'duration_secs': 0.009349} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.454900] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.455178] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7/7c631ea9-26eb-4e36-9b97-6ef372e9f6a7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1399.455457] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f104d1a-2f2c-4bbc-92e0-f170547069fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.462342] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1399.462342] env[61545]: value = "task-4257090" [ 1399.462342] env[61545]: _type = "Task" [ 1399.462342] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.475558] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.818023] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257089, 'name': CreateVM_Task, 'duration_secs': 0.275902} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.818243] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.818697] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.818861] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.819226] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1399.819500] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4fd881-5458-4550-b250-069216040e76 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.826521] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1399.826521] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eff16c-2dd6-ab7f-ba4e-576eebb41912" [ 1399.826521] env[61545]: _type = "Task" [ 1399.826521] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.837156] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eff16c-2dd6-ab7f-ba4e-576eebb41912, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.974117] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459778} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.974562] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7/7c631ea9-26eb-4e36-9b97-6ef372e9f6a7.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.974659] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.974827] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-404a553e-6dab-4e2c-89d2-9f9ae43d78aa {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.981411] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1399.981411] env[61545]: value = "task-4257091" [ 1399.981411] env[61545]: _type = "Task" [ 1399.981411] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.989144] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257091, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.337365] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52eff16c-2dd6-ab7f-ba4e-576eebb41912, 'name': SearchDatastore_Task, 'duration_secs': 0.048311} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.337628] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.337888] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.338189] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.338345] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1400.338526] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.338785] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4dec267-00a4-41f0-89a3-f17c58733de7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.348525] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.348760] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.349576] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a9c978a-467f-482c-b20d-98658c2518fe {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.356828] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1400.356828] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527829bd-f5e9-93df-b0c4-2951db909f34" [ 1400.356828] env[61545]: _type = "Task" [ 1400.356828] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.365419] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527829bd-f5e9-93df-b0c4-2951db909f34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.490704] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257091, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108728} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.490963] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1400.491782] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c683cc-b76c-4b31-9146-aecc212c8ca7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.513660] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7/7c631ea9-26eb-4e36-9b97-6ef372e9f6a7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1400.514077] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dab98c27-3abe-4fd5-8ab3-6ef406958291 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.533356] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1400.533356] env[61545]: value = "task-4257092" [ 1400.533356] env[61545]: _type = "Task" [ 1400.533356] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.542424] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257092, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.867202] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]527829bd-f5e9-93df-b0c4-2951db909f34, 'name': SearchDatastore_Task, 'duration_secs': 0.025318} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.867905] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9538f84-d664-4898-95d8-fcbd01acad5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.873616] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1400.873616] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a6d4b7-6f80-e23d-e0a2-afefea07376f" [ 1400.873616] env[61545]: _type = "Task" [ 1400.873616] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.881533] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a6d4b7-6f80-e23d-e0a2-afefea07376f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.042807] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257092, 'name': ReconfigVM_Task, 'duration_secs': 0.26802} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.043194] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7/7c631ea9-26eb-4e36-9b97-6ef372e9f6a7.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.043731] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67726611-e0e0-4201-8768-ee4a3659d521 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.050055] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1401.050055] env[61545]: value = "task-4257093" [ 1401.050055] env[61545]: _type = "Task" [ 1401.050055] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.058251] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257093, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.383704] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52a6d4b7-6f80-e23d-e0a2-afefea07376f, 'name': SearchDatastore_Task, 'duration_secs': 0.010059} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.383987] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1401.384258] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.384570] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3eebfb5-abd8-486b-829c-47b23bdc2dbf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.390964] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1401.390964] env[61545]: value = "task-4257094" [ 1401.390964] env[61545]: _type = "Task" [ 1401.390964] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.398627] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.560764] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257093, 'name': Rename_Task, 'duration_secs': 0.157627} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.561070] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.561343] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c938cb3-037c-4d2f-b600-6e68c7b98ace {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.570648] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1401.570648] env[61545]: value = "task-4257095" [ 1401.570648] env[61545]: _type = "Task" [ 1401.570648] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.580603] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.901085] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257094, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463656} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.901380] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.901654] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.901943] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2d3184d-b708-48cc-b874-53ebea2e1ffc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.909580] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1401.909580] env[61545]: value = "task-4257096" [ 1401.909580] env[61545]: _type = "Task" [ 1401.909580] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.917858] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257096, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.081293] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257095, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.420311] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063164} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.420636] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.421555] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c45f167-9660-410d-af4c-8fb881928291 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.444555] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.444869] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dd13e86-80c9-4565-b4cb-eb2522c79747 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.464929] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1402.464929] env[61545]: value = "task-4257097" [ 1402.464929] env[61545]: _type = "Task" [ 1402.464929] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.473298] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257097, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.580966] env[61545]: DEBUG oslo_vmware.api [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257095, 'name': PowerOnVM_Task, 'duration_secs': 0.581962} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.581237] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.581412] env[61545]: INFO nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Took 8.33 seconds to spawn the instance on the hypervisor. 
[ 1402.581598] env[61545]: DEBUG nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.582421] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef166e5d-b0ae-461c-a4ff-af9da3ea9465 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.975638] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.099246] env[61545]: INFO nova.compute.manager [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Took 13.02 seconds to build instance. [ 1403.475268] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.601416] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2ae6dcb0-7906-41c1-ba15-b6bebc5f416a tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.531s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.977575] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257097, 'name': ReconfigVM_Task, 'duration_secs': 1.273291} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.977983] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Reconfigured VM instance instance-0000007f to attach disk [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.979070] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a83434eb-c869-47dc-8ee9-a68045e18853 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.987412] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1403.987412] env[61545]: value = "task-4257098" [ 1403.987412] env[61545]: _type = "Task" [ 1403.987412] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.997590] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257098, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.210349] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.210719] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.210796] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.211596] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.211596] 
env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.213853] env[61545]: INFO nova.compute.manager [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Terminating instance [ 1404.498866] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257098, 'name': Rename_Task, 'duration_secs': 0.150165} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.499134] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.499416] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83790e3c-f4e6-44d6-9e09-c0d76a8d0eed {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.507484] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1404.507484] env[61545]: value = "task-4257099" [ 1404.507484] env[61545]: _type = "Task" [ 1404.507484] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.516531] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257099, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.647458] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "da648e8c-aea3-4731-ad2b-719a15f29abf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.647717] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.647933] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.648143] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.648331] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.650762] env[61545]: INFO nova.compute.manager [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Terminating instance [ 1404.717949] env[61545]: DEBUG nova.compute.manager [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1404.718232] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1404.719238] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c18f4a9-047f-4bd1-9da3-f0132ea2ba75 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.728300] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.728612] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2d43ec3-ab1d-4ae6-b281-abced548e785 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.737287] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1404.737287] env[61545]: value = "task-4257100" [ 1404.737287] env[61545]: _type = "Task" [ 1404.737287] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.746750] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.020744] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257099, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.154965] env[61545]: DEBUG nova.compute.manager [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1405.155169] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1405.156073] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f077c9-c431-45d9-8d29-973943d26fa9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.164545] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.164838] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c31ce94-ea73-4ee4-9a4f-c4e253940b67 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.172616] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1405.172616] env[61545]: value = "task-4257101" [ 1405.172616] env[61545]: _type = "Task" [ 1405.172616] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.182103] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.248560] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257100, 'name': PowerOffVM_Task, 'duration_secs': 0.23329} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.249072] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.249137] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.249362] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee186196-a174-4981-abfc-cef09d68b998 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.321960] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1405.322285] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1405.322599] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleting the datastore file [datastore2] 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1405.322920] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f37a8d3e-e32c-4620-be2f-6680d710ebe1 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.330907] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for the task: (returnval){ [ 1405.330907] env[61545]: value = "task-4257103" [ 1405.330907] env[61545]: _type = "Task" [ 1405.330907] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.340799] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.520108] env[61545]: DEBUG oslo_vmware.api [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257099, 'name': PowerOnVM_Task, 'duration_secs': 0.554119} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.520108] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.520108] env[61545]: INFO nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Took 6.31 seconds to spawn the instance on the hypervisor. [ 1405.520108] env[61545]: DEBUG nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.520922] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0113c4b5-d67d-4020-a928-f1a47ffd95a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.684213] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257101, 'name': PowerOffVM_Task, 'duration_secs': 0.279292} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.684495] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.684663] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.684955] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0de4e40f-d28e-4bb1-a3c5-23080ab02051 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.745464] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1405.745729] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1405.745851] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Deleting the datastore file [datastore2] da648e8c-aea3-4731-ad2b-719a15f29abf {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1405.746156] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c6f7b3b-fec0-49ed-bec8-f79617078ff9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.752988] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for the task: (returnval){ [ 1405.752988] env[61545]: value = "task-4257105" [ 1405.752988] env[61545]: _type = "Task" [ 1405.752988] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.761052] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.841405] env[61545]: DEBUG oslo_vmware.api [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Task: {'id': task-4257103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166968} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.841600] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.841785] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.841952] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.842156] env[61545]: INFO nova.compute.manager [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1405.842415] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.842631] env[61545]: DEBUG nova.compute.manager [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1405.842732] env[61545]: DEBUG nova.network.neutron [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1406.038932] env[61545]: INFO nova.compute.manager [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Took 11.01 seconds to build instance. 
[ 1406.115370] env[61545]: DEBUG nova.compute.manager [req-fa0bd3ed-6e12-440b-ba35-466ad50a8230 req-785ae8af-8427-47c9-8e9f-cd3300ff2eef service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Received event network-vif-deleted-73eaa178-06d6-45cf-bf15-28ff642a115e {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1406.115666] env[61545]: INFO nova.compute.manager [req-fa0bd3ed-6e12-440b-ba35-466ad50a8230 req-785ae8af-8427-47c9-8e9f-cd3300ff2eef service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Neutron deleted interface 73eaa178-06d6-45cf-bf15-28ff642a115e; detaching it from the instance and deleting it from the info cache [ 1406.115666] env[61545]: DEBUG nova.network.neutron [req-fa0bd3ed-6e12-440b-ba35-466ad50a8230 req-785ae8af-8427-47c9-8e9f-cd3300ff2eef service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.264694] env[61545]: DEBUG oslo_vmware.api [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Task: {'id': task-4257105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139311} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.265110] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1406.265222] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1406.265467] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1406.265594] env[61545]: INFO nova.compute.manager [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1406.265806] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1406.266014] env[61545]: DEBUG nova.compute.manager [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1406.266112] env[61545]: DEBUG nova.network.neutron [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1406.540884] env[61545]: DEBUG oslo_concurrency.lockutils [None req-e2e1bd45-0487-4f49-bc3a-4005890469b1 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.519s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.598598] env[61545]: DEBUG nova.network.neutron [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.622023] env[61545]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e84a2182-0583-4aea-b662-8372a0c8b508 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.630545] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ffacae-1f36-41f2-ba2f-ac0e42200ad6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.662307] env[61545]: DEBUG nova.compute.manager [req-fa0bd3ed-6e12-440b-ba35-466ad50a8230 req-785ae8af-8427-47c9-8e9f-cd3300ff2eef service nova] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Detach interface failed, port_id=73eaa178-06d6-45cf-bf15-28ff642a115e, reason: Instance 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7 could not be found. {{(pid=61545) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1407.100176] env[61545]: INFO nova.compute.manager [-] [instance: 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7] Took 1.26 seconds to deallocate network for instance. 
[ 1407.207069] env[61545]: DEBUG nova.network.neutron [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.528576] env[61545]: INFO nova.compute.manager [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Rebuilding instance [ 1407.581842] env[61545]: DEBUG nova.compute.manager [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1407.582486] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17945e4-433e-4b2c-b06a-07aca5d771be {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.608325] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.608702] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.608966] env[61545]: DEBUG nova.objects.instance [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lazy-loading 'resources' on Instance uuid 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.710089] env[61545]: INFO nova.compute.manager [-] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Took 1.44 seconds to deallocate network for instance. 
[ 1408.143856] env[61545]: DEBUG nova.compute.manager [req-11608d18-c78f-473e-b4d9-b4fce23dc7c6 req-08034d1d-84ae-4b8f-8a22-26753bb1c74f service nova] [instance: da648e8c-aea3-4731-ad2b-719a15f29abf] Received event network-vif-deleted-62ea8d65-9aae-4400-998b-1a364d16dfcb {{(pid=61545) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1408.163495] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e6c682-55e0-407e-8de3-33d8d971bfb4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.171666] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7905e58a-d64a-4085-b57a-c8eaf3438ffb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.202157] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eabfbad-5924-49a4-a3e2-3b527e33a130 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.210457] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519b51fa-0551-4a4f-b1a3-f4d9d6d39153 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.216736] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.227147] env[61545]: DEBUG nova.compute.provider_tree [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.598217] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1408.598613] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6afb4c78-c978-4fca-a50d-b488abb79573 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.606823] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1408.606823] env[61545]: value = "task-4257106" [ 1408.606823] env[61545]: _type = "Task" [ 1408.606823] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.615890] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.731466] env[61545]: DEBUG nova.scheduler.client.report [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1409.117654] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257106, 'name': PowerOffVM_Task, 'duration_secs': 0.13758} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.117935] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1409.118198] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1409.119038] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf50155-6363-4a4f-8430-f89cab8d0b1b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.126123] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1409.126363] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37c3bc8f-7421-44d6-8f4a-47ecf4a4b115 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.150570] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1409.150796] env[61545]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1409.150976] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Deleting the datastore file [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1409.151275] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d47d5d8-38db-465d-8eea-9b2388bc99b4 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.157737] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1409.157737] env[61545]: value = "task-4257108" [ 1409.157737] env[61545]: _type = "Task" [ 1409.157737] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.166518] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.239102] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.241495] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.025s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.241725] env[61545]: DEBUG nova.objects.instance [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lazy-loading 'resources' on Instance uuid da648e8c-aea3-4731-ad2b-719a15f29abf {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.258944] env[61545]: INFO nova.scheduler.client.report [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Deleted allocations for instance 7c631ea9-26eb-4e36-9b97-6ef372e9f6a7 [ 1409.667653] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 
0.094439} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.668074] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1409.668142] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1409.668288] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1409.765909] env[61545]: DEBUG oslo_concurrency.lockutils [None req-b859187d-5e6b-4744-b923-6f86a09be149 tempest-ServerDiskConfigTestJSON-1666434622 tempest-ServerDiskConfigTestJSON-1666434622-project-member] Lock "7c631ea9-26eb-4e36-9b97-6ef372e9f6a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.555s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.789322] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6b4c30-3a2a-47f8-8eca-52c69808cfc7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.797314] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b38bed9-f2b4-420e-a809-4dd7b63d531d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.828706] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33adfcff-f856-4621-b3ff-6af642db435b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.836770] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb80ff46-c17a-47c5-8d1c-d616102b2cf7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.850174] env[61545]: DEBUG nova.compute.provider_tree [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.354141] env[61545]: DEBUG nova.scheduler.client.report [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1410.751019] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1410.751890] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1410.751890] env[61545]: DEBUG nova.virt.hardware [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1410.753015] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b0af52-d972-4767-809d-fcfd42974c3c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.766035] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6add39dd-d62f-4f51-a547-7632ecbd4a23 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.784212] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1410.789039] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1410.789039] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1410.789260] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4462509e-e963-4c3a-b177-c689b50a955a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.807601] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1410.807601] env[61545]: value = "task-4257109" [ 1410.807601] env[61545]: _type = "Task" [ 1410.807601] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.816195] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257109, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.859447] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.884443] env[61545]: INFO nova.scheduler.client.report [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Deleted allocations for instance da648e8c-aea3-4731-ad2b-719a15f29abf [ 1411.319943] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257109, 'name': CreateVM_Task, 'duration_secs': 0.279899} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.320141] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1411.320486] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.320678] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1411.321059] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1411.321335] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d2c8802-7fb4-499a-b1f6-27a310b3e97e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.326431] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1411.326431] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c597d4-eaa8-4efc-7361-98c35eb0c18b" [ 1411.326431] env[61545]: _type = "Task" [ 1411.326431] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.335813] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c597d4-eaa8-4efc-7361-98c35eb0c18b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.393563] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a53dade5-9459-4fc7-949b-b058dc419dcb tempest-ServersTestManualDisk-1260630413 tempest-ServersTestManualDisk-1260630413-project-member] Lock "da648e8c-aea3-4731-ad2b-719a15f29abf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.746s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.837805] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52c597d4-eaa8-4efc-7361-98c35eb0c18b, 'name': SearchDatastore_Task, 'duration_secs': 0.012612} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.838145] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1411.838471] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1411.838576] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.838724] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1411.838902] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1411.839195] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-fa83659e-6230-4e99-9ddb-4d60fdae8b11 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.848780] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1411.848996] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1411.849751] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0db64b20-96d6-4c7c-87bf-45bb6dac1a01 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.855380] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1411.855380] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c5f81-6c91-44c9-a356-a717ba9ef8c2" [ 1411.855380] env[61545]: _type = "Task" [ 1411.855380] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.863016] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c5f81-6c91-44c9-a356-a717ba9ef8c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.367192] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]524c5f81-6c91-44c9-a356-a717ba9ef8c2, 'name': SearchDatastore_Task, 'duration_secs': 0.00985} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.368036] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172b83c0-3f55-46e0-9822-0df0bbbee437 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.376855] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1412.376855] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52300684-ede1-d5fe-f8c2-07e7819bd2a3" [ 1412.376855] env[61545]: _type = "Task" [ 1412.376855] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.386066] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52300684-ede1-d5fe-f8c2-07e7819bd2a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.888471] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52300684-ede1-d5fe-f8c2-07e7819bd2a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010022} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.888806] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1412.889022] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1412.890691] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad660a3a-7ebb-47a3-a373-2296300f4d16 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.897644] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1412.897644] env[61545]: value = "task-4257110" [ 1412.897644] env[61545]: _type = "Task" [ 1412.897644] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.908480] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.409114] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486181} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.409342] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1413.409572] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1413.409829] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4ed57aa-dd35-4bde-b28c-7c02554dcd47 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.417456] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1413.417456] env[61545]: value = "task-4257111" [ 1413.417456] env[61545]: _type = "Task" [ 1413.417456] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.425328] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.831592] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.831786] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.927795] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069554} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.928197] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1413.928850] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ca1a7a-fccb-4e8b-ac41-e216ff0a887d {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.952269] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.952654] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a832d482-6da0-4cd0-9210-897fb5f6ce57 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.974789] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1413.974789] env[61545]: value = "task-4257112" [ 1413.974789] env[61545]: _type = "Task" [ 1413.974789] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.983778] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257112, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.334775] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Starting instance... {{(pid=61545) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1414.485420] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257112, 'name': ReconfigVM_Task, 'duration_secs': 0.285732} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.485724] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Reconfigured VM instance instance-0000007f to attach disk [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba/bc5f20b7-6ad2-4370-9691-9106fa87daba.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1414.486332] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a4351d1-e02b-4302-8288-e03619b4695c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.493377] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1414.493377] env[61545]: value = "task-4257113" [ 1414.493377] env[61545]: _type = "Task" [ 1414.493377] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.501704] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257113, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.858461] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.858750] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.860296] env[61545]: INFO nova.compute.claims [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.003117] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257113, 'name': Rename_Task, 'duration_secs': 0.146882} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.003486] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1415.003697] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49ed1dfa-9549-43e6-b5e9-c5227a75183b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.010083] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1415.010083] env[61545]: value = "task-4257114" [ 1415.010083] env[61545]: _type = "Task" [ 1415.010083] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.017829] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257114, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.520302] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257114, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.908728] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa4fb2d-30bd-4b40-81da-b74b682a95d9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.916849] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397bfb5a-ccee-436a-9461-6830d0039f5e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.948699] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313b0b85-06a2-40bf-8f9e-2912009016ec {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.957147] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d589e31-eaa3-426c-83fe-287e347a9e85 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.971399] env[61545]: DEBUG nova.compute.provider_tree [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.021955] env[61545]: DEBUG oslo_vmware.api [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': 
task-4257114, 'name': PowerOnVM_Task, 'duration_secs': 0.994443} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.022379] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1416.022658] env[61545]: DEBUG nova.compute.manager [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1416.023856] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29c03ec-eb9b-4abc-a990-283e53ce5e74 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.476046] env[61545]: DEBUG nova.scheduler.client.report [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1416.542439] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.979937] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1416.980456] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Start building networks asynchronously for instance. 
{{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1416.983010] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.441s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.983198] env[61545]: DEBUG nova.objects.instance [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1417.313817] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.314248] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.314363] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "bc5f20b7-6ad2-4370-9691-9106fa87daba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.314554] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.314730] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.316929] env[61545]: INFO nova.compute.manager [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Terminating instance [ 1417.487772] env[61545]: DEBUG nova.compute.utils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed 
tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Using /dev/sd instead of None {{(pid=61545) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1417.489391] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Not allocating networking since 'none' was specified. {{(pid=61545) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1417.820488] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "refresh_cache-bc5f20b7-6ad2-4370-9691-9106fa87daba" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.820722] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquired lock "refresh_cache-bc5f20b7-6ad2-4370-9691-9106fa87daba" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1417.820922] env[61545]: DEBUG nova.network.neutron [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.994072] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Start building block device mappings for instance. {{(pid=61545) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1417.997057] env[61545]: DEBUG oslo_concurrency.lockutils [None req-4e0eed92-ffc4-4eca-99ec-efe42f8ddc1a tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.343657] env[61545]: DEBUG nova.network.neutron [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance cache missing network info. 
{{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.407031] env[61545]: DEBUG nova.network.neutron [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.910178] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Releasing lock "refresh_cache-bc5f20b7-6ad2-4370-9691-9106fa87daba" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1418.910637] env[61545]: DEBUG nova.compute.manager [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Start destroying the instance on the hypervisor. {{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1418.910837] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.912214] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6926beeb-cf65-4892-922d-5a48eb80be9f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.920196] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1418.920503] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-763fa379-d8f5-42b1-836f-1c90ef46f6d8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.927744] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1418.927744] env[61545]: value = "task-4257115" [ 1418.927744] env[61545]: _type = "Task" [ 1418.927744] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.937022] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.006531] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Start spawning the instance on the hypervisor. 
{{(pid=61545) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1419.037805] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1419.038066] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.038233] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Image limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1419.038422] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.038567] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1419.038715] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1419.038921] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1419.039099] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1419.039279] env[61545]: DEBUG 
nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1419.039440] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1419.039613] env[61545]: DEBUG nova.virt.hardware [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1419.040557] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e41ece-65ff-4189-9d00-dae2f8359c29 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.048942] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e4efae-4702-4def-ba6b-6a5aa81f0985 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.063907] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.070699] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Creating folder: Project (4a60fdb0ec5448f9b93b69d610d76217). Parent ref: group-v838542. {{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1419.071135] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbc7ad70-296e-4a43-93d9-b603ded58a63 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.083086] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Created folder: Project (4a60fdb0ec5448f9b93b69d610d76217) in parent group-v838542. [ 1419.083286] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Creating folder: Instances. Parent ref: group-v838886. 
{{(pid=61545) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1419.083536] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9b92442-b681-4ea0-bac4-ccb1320c2a31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.099967] env[61545]: INFO nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Created folder: Instances in parent group-v838886. [ 1419.100229] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1419.100453] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.100700] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c9196de-20b0-41b1-8e1c-7f2a051839a7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.121045] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.121045] env[61545]: value = "task-4257118" [ 1419.121045] env[61545]: _type = "Task" [ 1419.121045] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.129149] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257118, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.438025] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257115, 'name': PowerOffVM_Task, 'duration_secs': 0.20206} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.438485] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.438485] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.438713] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65ef3573-2597-40ed-b110-7049ce1ba11f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.465532] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.465770] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.465935] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Deleting the datastore file [datastore2] bc5f20b7-6ad2-4370-9691-9106fa87daba {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.466234] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-082368e4-a90d-4d0c-bb37-63aa1fe0adc8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.473887] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for the task: (returnval){ [ 1419.473887] env[61545]: value = "task-4257120" [ 1419.473887] env[61545]: _type = "Task" [ 1419.473887] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.482504] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.631188] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257118, 'name': CreateVM_Task, 'duration_secs': 0.291253} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.631404] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.631824] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.631990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1419.632340] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1419.632638] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3425d238-e9b2-4b6e-9f80-5afa9220e47a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.638215] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1419.638215] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2b0f4-d09e-7d57-c06b-829ae5defc77" [ 1419.638215] env[61545]: _type = "Task" [ 1419.638215] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.646590] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2b0f4-d09e-7d57-c06b-829ae5defc77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.985859] env[61545]: DEBUG oslo_vmware.api [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Task: {'id': task-4257120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143565} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.986069] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.986261] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.986437] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.986615] env[61545]: INFO nova.compute.manager [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1419.986864] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1419.987073] env[61545]: DEBUG nova.compute.manager [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.987171] env[61545]: DEBUG nova.network.neutron [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.002910] env[61545]: DEBUG nova.network.neutron [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1420.149084] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52b2b0f4-d09e-7d57-c06b-829ae5defc77, 'name': SearchDatastore_Task, 'duration_secs': 0.016704} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.149331] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1420.149547] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.149782] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.149929] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1420.150123] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.150399] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4fd0161-b324-4433-b4c2-672578a367b6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.159505] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.159736] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.160475] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9127b26b-db7c-4075-8a11-f64d1ef0a7b8 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.166326] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1420.166326] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52df24a4-90f3-a299-4380-75f8452cb070" [ 1420.166326] env[61545]: _type = "Task" [ 1420.166326] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.174258] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52df24a4-90f3-a299-4380-75f8452cb070, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.505388] env[61545]: DEBUG nova.network.neutron [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.677298] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52df24a4-90f3-a299-4380-75f8452cb070, 'name': SearchDatastore_Task, 'duration_secs': 0.009685} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.678099] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-486dcff6-7862-45da-b9bd-f567c0da52a0 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.683868] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1420.683868] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5334e-5f95-e126-dee3-815f810fde93" [ 1420.683868] env[61545]: _type = "Task" [ 1420.683868] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.692222] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5334e-5f95-e126-dee3-815f810fde93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.008501] env[61545]: INFO nova.compute.manager [-] [instance: bc5f20b7-6ad2-4370-9691-9106fa87daba] Took 1.02 seconds to deallocate network for instance. 
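The trace above is the driver's standard destroy path: PowerOffVM_Task, VirtualMachine.UnregisterVM, FileManager.DeleteDatastoreFile_Task, and finally network deallocation, with every asynchronous vSphere call polled through oslo.vmware until it completes. As a rough, standalone illustration of that invoke-then-poll pattern (the "Waiting for the task ... progress is 0% ... completed successfully" triplets that recur throughout this log), a minimal sketch against a placeholder vCenter might look like the following; the host, credentials, and the moref value are assumptions for illustration, not values taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical VirtualMachine managed-object reference.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Invoke the asynchronous vSphere call, then poll the returned task
    # until it succeeds or raises; the polling is what emits the periodic
    # "progress is N%" lines seen in the log above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)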
[ 1421.194875] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d5334e-5f95-e126-dee3-815f810fde93, 'name': SearchDatastore_Task, 'duration_secs': 0.010812} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.195246] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1421.195543] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.195837] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-756ff786-ea9e-4f23-9fe8-b4a40feb9b93 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.203908] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1421.203908] env[61545]: value = "task-4257121" [ 1421.203908] env[61545]: _type = "Task" [ 1421.203908] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.212748] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257121, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.516073] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1421.516073] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1421.516073] env[61545]: DEBUG nova.objects.instance [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lazy-loading 'resources' on Instance uuid bc5f20b7-6ad2-4370-9691-9106fa87daba {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1421.714673] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502032} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.714979] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.715235] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.715507] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0b98452-d1c9-4cec-bdf0-e14b8e3d4d71 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.722143] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1421.722143] env[61545]: value = "task-4257122" [ 1421.722143] env[61545]: _type = "Task" [ 1421.722143] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.732415] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.067559] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea6318f-468b-4fb8-ba85-3ebef430dd62 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.075726] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abac309-d28f-4765-9683-dbee5fe7cf31 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.106975] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b047b21-04ab-41d2-ab55-58101ff6b589 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.114954] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02aadcb9-e479-4364-b9b8-13fc19f16d7b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.128422] env[61545]: DEBUG nova.compute.provider_tree [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1422.232462] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067296} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.232755] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1422.233575] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b11e2de-ba60-4795-a199-73e16422bfb6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.253675] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.254029] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3355edd-a027-4b36-b548-67d42de8b9bb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.275365] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1422.275365] env[61545]: value = "task-4257123" [ 1422.275365] env[61545]: _type = "Task" [ 1422.275365] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.284423] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257123, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.631394] env[61545]: DEBUG nova.scheduler.client.report [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1422.785663] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257123, 'name': ReconfigVM_Task, 'duration_secs': 0.286005} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.786015] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Reconfigured VM instance instance-00000080 to attach disk [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.786709] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7e85bd2-db28-4b64-b64c-073851533ce7 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.794254] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1422.794254] env[61545]: value = "task-4257124" [ 1422.794254] env[61545]: _type = "Task" [ 1422.794254] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.802676] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257124, 'name': Rename_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.136838] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1423.156034] env[61545]: INFO nova.scheduler.client.report [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Deleted allocations for instance bc5f20b7-6ad2-4370-9691-9106fa87daba [ 1423.305724] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257124, 'name': Rename_Task} progress is 14%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.663742] env[61545]: DEBUG oslo_concurrency.lockutils [None req-12fd29c7-6123-4567-96f8-f8d90999a141 tempest-ServerShowV254Test-29696842 tempest-ServerShowV254Test-29696842-project-member] Lock "bc5f20b7-6ad2-4370-9691-9106fa87daba" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.349s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1423.804864] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257124, 'name': Rename_Task, 'duration_secs': 0.840173} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.805170] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1423.805419] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3471a933-6429-47c2-bbb2-efea7334920f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.812923] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1423.812923] env[61545]: value = "task-4257125" [ 1423.812923] env[61545]: _type = "Task" [ 1423.812923] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.821380] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257125, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.322780] env[61545]: DEBUG oslo_vmware.api [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257125, 'name': PowerOnVM_Task, 'duration_secs': 0.399215} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.322998] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.323243] env[61545]: INFO nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Took 5.32 seconds to spawn the instance on the hypervisor. [ 1424.323423] env[61545]: DEBUG nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1424.324425] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abc2fb0-072f-4bbb-8046-3117f282077b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.844135] env[61545]: INFO nova.compute.manager [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Took 10.00 seconds to build instance. 
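The ten-second build recorded above is the spawn-from-cache path: the image is found in [datastore2] devstack-image-cache_base, copied next to the new instance with VirtualDiskManager.CopyVirtualDisk_Task, grown to the 1048576 KiB root size logged earlier with ExtendVirtualDisk_Task, attached via ReconfigVM_Task, and the VM is then renamed and powered on. Continuing the sketch given earlier (same session object; dc_ref stands in for a Datacenter moref and, like its value, is an assumption), the two disk steps could be approximated as:

    from oslo_vmware import vim_util

    disk_mgr = session.vim.service_content.virtualDiskManager
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # hypothetical value

    # Source and destination paths mirror the ones in the log above.
    src = ('[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/'
           'bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk')
    dst = '[datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk'

    # Copy the cached image disk into the instance directory, then extend it
    # to the root-disk size; each task is polled to completion as before.
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                     name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)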
[ 1425.346748] env[61545]: DEBUG oslo_concurrency.lockutils [None req-2cc9423e-d127-479c-97f1-fa628f3511ed tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 11.515s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.110850] env[61545]: INFO nova.compute.manager [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Rebuilding instance [ 1426.157895] env[61545]: DEBUG nova.compute.manager [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1426.158791] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9049ec-fd1a-4243-9145-d454d7893391 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.173089] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1427.173089] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7236e84-862a-423b-b21e-71753f37230a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.180823] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1427.180823] env[61545]: value = "task-4257126" [ 1427.180823] env[61545]: _type = "Task" [ 1427.180823] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.189720] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.692363] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257126, 'name': PowerOffVM_Task, 'duration_secs': 0.18945} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.692680] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1427.693322] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1427.694102] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5cf6e6-8638-4dd0-add5-616be0d48c56 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.700833] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1427.701081] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9932dd39-af49-4172-8089-ef11b9b073c6 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.725725] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1427.725986] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1427.726138] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Deleting the datastore file [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.726412] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b29af460-0a1f-4d19-ae05-8810dc269a5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.733878] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1427.733878] env[61545]: value = "task-4257128" [ 1427.733878] env[61545]: _type = "Task" [ 1427.733878] env[61545]: } to complete. 
{{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.742276] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.244101] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094576} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.244528] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1428.244528] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1428.244701] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.274593] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-03T12:44:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-03T12:44:36Z,direct_url=,disk_format='vmdk',id=bf68eb43-6d66-4532-9eb1-af7d78faa698,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5e1f4f8e2afb442e987b71a3579e05d9',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-03T12:44:37Z,virtual_size=,visibility=), allow threads: False {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1429.275020] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Flavor limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1429.275020] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Image 
limits 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1429.275205] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Flavor pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1429.275351] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Image pref 0:0:0 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1429.275497] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61545) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1429.275702] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1429.275865] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1429.276043] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Got 1 possible topologies {{(pid=61545) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1429.276209] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1429.276378] env[61545]: DEBUG nova.virt.hardware [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61545) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1429.277560] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54c6618-38b1-4c74-a977-4c515657f187 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.285239] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62a80db-cc82-4779-857d-f7d12c689e6a {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.299752] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf 
tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance VIF info [] {{(pid=61545) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.305391] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1429.305614] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Creating VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1429.305818] env[61545]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-715e3753-8326-4ada-8d57-a8db978b0591 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.323013] env[61545]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.323013] env[61545]: value = "task-4257129" [ 1429.323013] env[61545]: _type = "Task" [ 1429.323013] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.330644] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257129, 'name': CreateVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.833153] env[61545]: DEBUG oslo_vmware.api [-] Task: {'id': task-4257129, 'name': CreateVM_Task, 'duration_secs': 0.280571} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.833365] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Created VM on the ESX host {{(pid=61545) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1429.833779] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.833959] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1429.834299] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1429.834554] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d485793c-bf51-47cd-b5ac-40cf53444e5c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.839561] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1429.839561] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528f0cc1-200d-9243-4a82-ee72f464aff1" [ 1429.839561] env[61545]: _type = "Task" [ 1429.839561] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.847255] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528f0cc1-200d-9243-4a82-ee72f464aff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.350403] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]528f0cc1-200d-9243-4a82-ee72f464aff1, 'name': SearchDatastore_Task, 'duration_secs': 0.009568} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.350869] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1430.351069] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Processing image bf68eb43-6d66-4532-9eb1-af7d78faa698 {{(pid=61545) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1430.351316] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.351464] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1430.351641] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1430.351902] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6014a5dc-2d3c-41eb-86bb-f7f3fde376f9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.360800] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61545) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1430.360973] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61545) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1430.361685] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-854862ac-7de3-4d00-af25-ff69e4f4efdc {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.366673] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1430.366673] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f1de-f64b-6196-23d0-10964562b4cf" [ 1430.366673] env[61545]: _type = "Task" [ 1430.366673] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.374964] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f1de-f64b-6196-23d0-10964562b4cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.878890] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52d3f1de-f64b-6196-23d0-10964562b4cf, 'name': SearchDatastore_Task, 'duration_secs': 0.015986} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.879706] env[61545]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9b0c686-47e6-4f00-a1e8-921930f0140c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.885134] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1430.885134] env[61545]: value = "session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f9c90c-4baa-2e39-d997-1a5f77d9e224" [ 1430.885134] env[61545]: _type = "Task" [ 1430.885134] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.893191] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f9c90c-4baa-2e39-d997-1a5f77d9e224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.395814] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': session[52b70b89-2fe0-381b-9c3d-586b89b90f43]52f9c90c-4baa-2e39-d997-1a5f77d9e224, 'name': SearchDatastore_Task, 'duration_secs': 0.009596} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.396290] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Releasing lock "[datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1431.396343] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1431.396619] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a07a61e-98f4-49fa-bde7-e6d5b90c189e {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.403531] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1431.403531] env[61545]: value = "task-4257130" [ 1431.403531] env[61545]: _type = "Task" [ 1431.403531] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.411725] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.915216] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442091} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.915523] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/bf68eb43-6d66-4532-9eb1-af7d78faa698/bf68eb43-6d66-4532-9eb1-af7d78faa698.vmdk to [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk {{(pid=61545) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1431.915782] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Extending root virtual disk to 1048576 {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1431.916076] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-659c3678-85a2-4fa3-9098-1e33b1deb010 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.923444] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1431.923444] env[61545]: value = "task-4257131" [ 1431.923444] env[61545]: _type = "Task" [ 1431.923444] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.932376] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.433830] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066145} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.434295] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Extended root virtual disk {{(pid=61545) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1432.434911] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac444f25-9bbb-4733-a96a-706c10e00a48 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.455538] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1432.455802] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfe20979-85f6-4509-ba79-a301d8d29cd3 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.475808] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1432.475808] env[61545]: value = "task-4257132" [ 1432.475808] env[61545]: _type = "Task" [ 1432.475808] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.483633] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257132, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.987300] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257132, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.486778] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257132, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.990450] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257132, 'name': ReconfigVM_Task, 'duration_secs': 1.285883} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.990838] env[61545]: DEBUG nova.virt.vmwareapi.volumeops [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Reconfigured VM instance instance-00000080 to attach disk [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09/ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09.vmdk or device None with type sparse {{(pid=61545) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1433.991551] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-591aca70-f337-4aa7-b7f3-b485a39edc61 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.999499] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1433.999499] env[61545]: value = "task-4257133" [ 1433.999499] env[61545]: _type = "Task" [ 1433.999499] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.008326] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257133, 'name': Rename_Task} progress is 5%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.509627] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257133, 'name': Rename_Task, 'duration_secs': 0.120702} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.510126] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powering on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1434.510268] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4f93e44-3b59-48a8-8604-f5e9888ecc97 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.516912] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1434.516912] env[61545]: value = "task-4257134" [ 1434.516912] env[61545]: _type = "Task" [ 1434.516912] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.524983] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257134, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.026845] env[61545]: DEBUG oslo_vmware.api [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257134, 'name': PowerOnVM_Task, 'duration_secs': 0.408768} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.027127] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powered on the VM {{(pid=61545) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1435.027342] env[61545]: DEBUG nova.compute.manager [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Checking state {{(pid=61545) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1435.028150] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03691be-c1e3-4d4f-9826-642e1eadab39 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.186130] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.186382] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.546992] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.547318] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.547501] env[61545]: DEBUG nova.objects.instance [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61545) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1435.953258] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 
tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.953560] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.953769] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.954027] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.954205] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.956385] env[61545]: INFO nova.compute.manager [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Terminating instance [ 1436.186516] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.459565] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "refresh_cache-ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.459783] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquired lock "refresh_cache-ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1436.459928] env[61545]: DEBUG 
nova.network.neutron [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Building network info cache for instance {{(pid=61545) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.555800] env[61545]: DEBUG oslo_concurrency.lockutils [None req-7200ce22-72e1-490e-bb6f-cf719b15dbdf tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.689997] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1436.690304] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1436.690450] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.690612] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61545) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1436.691558] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb107dc0-f57d-4ffa-a2d2-406aa183a82b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.700489] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd407053-d99d-480b-b795-ec11707ca5b9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.714700] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b0a958-5588-4d23-b4ec-60cedfde40fb {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.721277] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70177b0a-42cb-49b8-9542-8e1a98c3b41c {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.750561] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180145MB free_disk=246GB free_vcpus=48 pci_devices=None {{(pid=61545) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1436.750774] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1436.750961] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1436.979760] env[61545]: DEBUG nova.network.neutron [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.044442] env[61545]: DEBUG nova.network.neutron [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.547247] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Releasing lock "refresh_cache-ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" {{(pid=61545) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1437.547744] env[61545]: DEBUG nova.compute.manager [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Start destroying the instance on the hypervisor. 
{{(pid=61545) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1437.547988] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Destroying instance {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1437.549040] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f51f976-b5f6-4934-9c73-ff9e67810089 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.557870] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powering off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1437.558212] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4bd05b1-d69b-4690-9842-3043f5fcd561 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.564998] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1437.564998] env[61545]: value = "task-4257135" [ 1437.564998] env[61545]: _type = "Task" [ 1437.564998] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.572987] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.777454] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Instance ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61545) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1437.777663] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1437.777809] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=250GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61545) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1437.804046] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abecf6aa-bd7c-4981-abd0-6943a622b676 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.811687] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b813966a-0c4f-4fe1-a799-9725a5576cf9 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.841740] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e11fc98-8eba-49cf-a052-1809039cf477 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.849116] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880bffea-36f9-4dae-bbcf-d9cfccd5fef5 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.579620] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257135, 'name': PowerOffVM_Task, 'duration_secs': 0.178706} completed successfully. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.580040] env[61545]: DEBUG nova.compute.provider_tree [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.582026] env[61545]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Powered off the VM {{(pid=61545) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1438.582243] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Unregistering the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1438.582485] env[61545]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ae3788f-03d2-4cd8-bd3f-856bd1def12f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.611433] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Unregistered the VM {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1438.611733] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Deleting contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1438.611818] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Deleting the datastore file [datastore2] ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09 {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1438.612116] env[61545]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f2e79b1-9243-4c5e-86f1-8eadba104ab2 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.618606] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for the task: (returnval){ [ 1438.618606] env[61545]: value = "task-4257137" [ 1438.618606] env[61545]: _type = "Task" [ 1438.618606] env[61545]: } to complete. {{(pid=61545) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.626528] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257137, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.084075] env[61545]: DEBUG nova.scheduler.client.report [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1439.128364] env[61545]: DEBUG oslo_vmware.api [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Task: {'id': task-4257137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092442} completed successfully. {{(pid=61545) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.128364] env[61545]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Deleted the datastore file {{(pid=61545) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1439.128630] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Deleted contents of the VM from datastore datastore2 {{(pid=61545) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1439.128678] env[61545]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance destroyed {{(pid=61545) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1439.128897] env[61545]: INFO nova.compute.manager [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Took 1.58 seconds to destroy the instance on the hypervisor. [ 1439.129161] env[61545]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61545) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1439.129349] env[61545]: DEBUG nova.compute.manager [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Deallocating network for instance {{(pid=61545) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1439.129443] env[61545]: DEBUG nova.network.neutron [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] deallocate_for_instance() {{(pid=61545) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1439.146398] env[61545]: DEBUG nova.network.neutron [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Instance cache missing network info. {{(pid=61545) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.589019] env[61545]: DEBUG nova.compute.resource_tracker [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61545) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1439.589436] env[61545]: DEBUG oslo_concurrency.lockutils [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.838s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.649347] env[61545]: DEBUG nova.network.neutron [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Updating instance_info_cache with network_info: [] {{(pid=61545) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.152135] env[61545]: INFO nova.compute.manager [-] [instance: ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09] Took 1.02 seconds to deallocate network for instance. 
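Every vCenter operation in the trace above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) follows the same shape: the service call returns a task handle, and the driver polls it until it reports success, logging "Waiting for the task", "progress is N%" and "completed successfully" along the way. The following is only a rough sketch of such a polling loop, not the actual oslo_vmware wait_for_task implementation; fetch_task_info and TaskFailed are hypothetical stand-ins for the vCenter query and error type:

    import logging
    import time

    LOG = logging.getLogger(__name__)


    class TaskFailed(Exception):
        """Hypothetical error raised when a polled task ends in an error state."""


    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the log pattern above.

        fetch_task_info is a hypothetical callable returning a dict with
        'state' ('running', 'success' or 'error'), 'progress' (a percentage)
        and optionally 'error'; the real driver gets this data from vCenter.
        """
        LOG.debug("Waiting for the task: %s to complete.", task_id)
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info["state"] == "success":
                LOG.debug("Task %s completed successfully in %.6fs.",
                          task_id, time.monotonic() - start)
                return info
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            LOG.debug("Task %s progress is %s%%.",
                      task_id, info.get("progress", 0))
            time.sleep(poll_interval)

The duration_secs values recorded for task-4257130 through task-4257137 above are elapsed times measured in essentially this way, between submitting the task and observing its completion.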
[ 1440.658810] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.659249] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.659361] env[61545]: DEBUG nova.objects.instance [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lazy-loading 'resources' on Instance uuid ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09 {{(pid=61545) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1441.191985] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862d6513-4128-41ba-945c-24b56b0d338f {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.199576] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eeb2b4-4ef1-46e9-92c6-0d640d4b787b {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.229940] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3f8d62-d6a1-4849-b17e-10d4b0463251 {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.237531] env[61545]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcc7cd2-a5f9-48fa-8adc-4b7c101f99cf {{(pid=61545) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.250601] env[61545]: DEBUG nova.compute.provider_tree [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Inventory has not changed in ProviderTree for provider: 7015027d-c4e1-4938-ac31-6e4672774d7e {{(pid=61545) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.753968] env[61545]: DEBUG nova.scheduler.client.report [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Inventory has not changed for provider 7015027d-c4e1-4938-ac31-6e4672774d7e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 450, 'reserved': 0, 'min_unit': 1, 'max_unit': 246, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61545) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1442.259840] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e 
tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.279879] env[61545]: INFO nova.scheduler.client.report [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Deleted allocations for instance ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09 [ 1442.589810] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.590105] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.590214] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.590361] env[61545]: DEBUG nova.compute.manager [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61545) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1442.787990] env[61545]: DEBUG oslo_concurrency.lockutils [None req-6f8992b1-8b42-4547-be86-1ee4d2344c6e tempest-ServersListShow2100Test-2012727568 tempest-ServersListShow2100Test-2012727568-project-member] Lock "ad8adc6a-7842-4e29-b2f3-4fbd5dca1c09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.834s {{(pid=61545) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.186930] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.187200] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.182884] env[61545]: DEBUG oslo_service.periodic_task [None req-a4d11931-1397-4d6a-9496-0cc7c7179f34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61545) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
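The resource-tracker records closing the section show the other recurring pattern: a named "compute_resources" lock is taken around every usage update, and the time spent waiting for and holding it is logged (e.g. "waited 0.000s", "held 1.601s", "held 2.838s"). The real service relies on oslo_concurrency.lockutils for this; the stdlib-only sketch below merely models that acquire/waited/released/held logging and is not the library's implementation:

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)

    _locks = {}
    _locks_guard = threading.Lock()


    @contextmanager
    def timed_lock(name, caller):
        """Simplified model of the 'Acquiring/acquired/released' records above."""
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
        t_wait = time.monotonic()
        lock.acquire()
        t_held = time.monotonic()
        try:
            LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                      name, caller, t_held - t_wait)
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - t_held)

    # Illustrative use, mirroring the update_usage records above:
    with timed_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.update_usage"):
        pass  # the tracker would adjust its tracked usage totals here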